Mirror of https://github.com/simonw/datasette.git, synced 2025-12-10 16:51:24 +01:00

Compare commits: main ... permission (3 commits)

Each file section below shows the diff's hunk headers followed by the affected lines as rendered (context lines first, then the removed and added versions of each changed line).
| Author | SHA1 | Date |
|---|---|---|
| | 6e35a6b4f7 | |
| | 94be9953c5 | |
| | 5140f4e815 | |
186 changed files with 6066 additions and 28381 deletions
.github/dependabot.yml (vendored, 10 changes)

@@ -5,7 +5,9 @@ updates:
    schedule:
      interval: daily
      time: "13:00"
    groups:
      python-packages:
        patterns:
          - "*"
    open-pull-requests-limit: 10
    ignore:
      - dependency-name: black
        versions:
          - 21.4b0
          - 21.4b1
.github/workflows/deploy-branch-preview.yml (vendored, 35 changes)

@@ -1,35 +0,0 @@
name: Deploy a Datasette branch preview to Vercel

on:
  workflow_dispatch:
    inputs:
      branch:
        description: "Branch to deploy"
        required: true
        type: string

jobs:
  deploy-branch-preview:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python 3.11
        uses: actions/setup-python@v6
        with:
          python-version: "3.11"
      - name: Install dependencies
        run: |
          pip install datasette-publish-vercel
      - name: Deploy the preview
        env:
          VERCEL_TOKEN: ${{ secrets.BRANCH_PREVIEW_VERCEL_TOKEN }}
        run: |
          export BRANCH="${{ github.event.inputs.branch }}"
          wget https://latest.datasette.io/fixtures.db
          datasette publish vercel fixtures.db \
            --branch $BRANCH \
            --project "datasette-preview-$BRANCH" \
            --token $VERCEL_TOKEN \
            --scope datasette \
            --about "Preview of $BRANCH" \
            --about_url "https://github.com/simonw/datasette/tree/$BRANCH"
.github/workflows/deploy-latest.yml (vendored, 84 changes)

@@ -1,11 +1,10 @@
name: Deploy latest.datasette.io

on:
  workflow_dispatch:
  push:
    branches:
      - main
      # - 1.0-dev
      - 1.0-dev

permissions:
  contents: read

@@ -15,12 +14,18 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Check out datasette
        uses: actions/checkout@v5
        uses: actions/checkout@v3
      - name: Set up Python
        uses: actions/setup-python@v6
        uses: actions/setup-python@v4
        with:
          python-version: "3.13"
          cache: pip
          python-version: "3.9"
      - uses: actions/cache@v3
        name: Configure pip caching
        with:
          path: ~/.cache/pip
          key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
          restore-keys: |
            ${{ runner.os }}-pip-
      - name: Install Python dependencies
        run: |
          python -m pip install --upgrade pip

@@ -32,19 +37,13 @@ jobs:
        run: |
          pytest -n auto -m "not serial"
          pytest -m "serial"
      - name: Build fixtures.db and other files needed to deploy the demo
        run: |-
          python tests/fixtures.py \
            fixtures.db \
            fixtures-config.json \
            fixtures-metadata.json \
            plugins \
            --extra-db-filename extra_database.db
      - name: Build fixtures.db
        run: python tests/fixtures.py fixtures.db fixtures.json plugins --extra-db-filename extra_database.db
      - name: Build docs.db
        if: ${{ github.ref == 'refs/heads/main' }}
        run: |-
          cd docs
          DISABLE_SPHINX_INLINE_TABS=1 sphinx-build -b xml . _build
          sphinx-build -b xml . _build
          sphinx-to-sqlite ../docs.db _build
          cd ..
      - name: Set up the alternate-route demo

@@ -58,50 +57,19 @@ jobs:
          db.route = "alternative-route"
          ' > plugins/alternative_route.py
          cp fixtures.db fixtures2.db
      - name: And the counters writable canned query demo
      - name: Make some modifications to metadata.json
        run: |
          cat > plugins/counters.py <<EOF
          from datasette import hookimpl
          @hookimpl
          def startup(datasette):
              db = datasette.add_memory_database("counters")
              async def inner():
                  await db.execute_write("create table if not exists counters (name text primary key, value integer)")
                  await db.execute_write("insert or ignore into counters (name, value) values ('counter_a', 0)")
                  await db.execute_write("insert or ignore into counters (name, value) values ('counter_b', 0)")
                  await db.execute_write("insert or ignore into counters (name, value) values ('counter_c', 0)")
              return inner
          @hookimpl
          def canned_queries(database):
              if database == "counters":
                  queries = {}
                  for name in ("counter_a", "counter_b", "counter_c"):
                      queries["increment_{}".format(name)] = {
                          "sql": "update counters set value = value + 1 where name = '{}'".format(name),
                          "on_success_message_sql": "select 'Counter {name} incremented to ' || value from counters where name = '{name}'".format(name=name),
                          "write": True,
                      }
                      queries["decrement_{}".format(name)] = {
                          "sql": "update counters set value = value - 1 where name = '{}'".format(name),
                          "on_success_message_sql": "select 'Counter {name} decremented to ' || value from counters where name = '{name}'".format(name=name),
                          "write": True,
                      }
                  return queries
          EOF
      # - name: Make some modifications to metadata.json
      #   run: |
      #     cat fixtures.json | \
      #       jq '.databases |= . + {"ephemeral": {"allow": {"id": "*"}}}' | \
      #       jq '.plugins |= . + {"datasette-ephemeral-tables": {"table_ttl": 900}}' \
      #       > metadata.json
      #     cat metadata.json
      - id: auth
        name: Authenticate to Google Cloud
        uses: google-github-actions/auth@v3
          cat fixtures.json | \
            jq '.databases |= . + {"ephemeral": {"allow": {"id": "*"}}}' | \
            jq '.plugins |= . + {"datasette-ephemeral-tables": {"table_ttl": 900}}' \
            > metadata.json
          cat metadata.json
      - name: Set up Cloud Run
        uses: google-github-actions/setup-gcloud@v0
        with:
          credentials_json: ${{ secrets.GCP_SA_KEY }}
      - name: Set up Cloud SDK
        uses: google-github-actions/setup-gcloud@v3
          version: '318.0.0'
          service_account_email: ${{ secrets.GCP_SA_EMAIL }}
          service_account_key: ${{ secrets.GCP_SA_KEY }}
      - name: Deploy to Cloud Run
        env:
          LATEST_DATASETTE_SECRET: ${{ secrets.LATEST_DATASETTE_SECRET }}

@@ -113,7 +81,7 @@ jobs:
          # Replace 1.0 with one-dot-zero in SUFFIX
          export SUFFIX=${SUFFIX//1.0/one-dot-zero}
          datasette publish cloudrun fixtures.db fixtures2.db extra_database.db \
            -m fixtures-metadata.json \
            -m metadata.json \
            --plugins-dir=plugins \
            --branch=$GITHUB_SHA \
            --version-note=$GITHUB_SHA \
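A note on the counters demo in the deploy-latest.yml diff above: the plugin registers writable canned queries via the canned_queries hook against an in-memory database created at startup. A minimal sketch of loading it outside the workflow, assuming plugins/counters.py from the diff is saved locally (the Datasette internals used here are existing public APIs):

```python
# Sketch: load the counters plugin and read back the table it creates.
# Assumes plugins/counters.py from the diff above exists on disk.
import asyncio
from datasette.app import Datasette


async def main():
    ds = Datasette(plugins_dir="plugins")
    await ds.invoke_startup()  # runs the startup hook that creates "counters"
    db = ds.get_database("counters")
    results = await db.execute("select name, value from counters order by name")
    print([tuple(r) for r in results.rows])
    # [('counter_a', 0), ('counter_b', 0), ('counter_c', 0)]


asyncio.run(main())
```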
.github/workflows/prettier.yml (vendored, 4 changes)

@@ -10,8 +10,8 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Check out repo
        uses: actions/checkout@v4
      - uses: actions/cache@v4
        uses: actions/checkout@v2
      - uses: actions/cache@v2
        name: Configure npm caching
        with:
          path: ~/.npm
.github/workflows/publish.yml (vendored, 73 changes)

@@ -12,15 +12,20 @@ jobs:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"]
        python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"]
    steps:
      - uses: actions/checkout@v4
      - uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v6
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
          cache: pip
          cache-dependency-path: pyproject.toml
      - uses: actions/cache@v3
        name: Configure pip caching
        with:
          path: ~/.cache/pip
          key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
          restore-keys: |
            ${{ runner.os }}-pip-
      - name: Install dependencies
        run: |
          pip install -e '.[test]'

@@ -31,38 +36,47 @@ jobs:
  deploy:
    runs-on: ubuntu-latest
    needs: [test]
    environment: release
    permissions:
      id-token: write
    steps:
      - uses: actions/checkout@v4
      - uses: actions/checkout@v3
      - name: Set up Python
        uses: actions/setup-python@v6
        uses: actions/setup-python@v4
        with:
          python-version: '3.13'
          cache: pip
          cache-dependency-path: pyproject.toml
          python-version: '3.11'
      - uses: actions/cache@v3
        name: Configure pip caching
        with:
          path: ~/.cache/pip
          key: ${{ runner.os }}-publish-pip-${{ hashFiles('**/setup.py') }}
          restore-keys: |
            ${{ runner.os }}-publish-pip-
      - name: Install dependencies
        run: |
          pip install setuptools wheel build
      - name: Build
        run: |
          python -m build
          pip install setuptools wheel twine
      - name: Publish
        uses: pypa/gh-action-pypi-publish@release/v1
        env:
          TWINE_USERNAME: __token__
          TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
        run: |
          python setup.py sdist bdist_wheel
          twine upload dist/*

  deploy_static_docs:
    runs-on: ubuntu-latest
    needs: [deploy]
    if: "!github.event.release.prerelease"
    steps:
      - uses: actions/checkout@v4
      - uses: actions/checkout@v2
      - name: Set up Python
        uses: actions/setup-python@v6
        uses: actions/setup-python@v2
        with:
          python-version: '3.10'
          cache: pip
          cache-dependency-path: pyproject.toml
      - uses: actions/cache@v2
        name: Configure pip caching
        with:
          path: ~/.cache/pip
          key: ${{ runner.os }}-publish-pip-${{ hashFiles('**/setup.py') }}
          restore-keys: |
            ${{ runner.os }}-publish-pip-
      - name: Install dependencies
        run: |
          python -m pip install -e .[docs]

@@ -70,16 +84,15 @@ jobs:
      - name: Build docs.db
        run: |-
          cd docs
          DISABLE_SPHINX_INLINE_TABS=1 sphinx-build -b xml . _build
          sphinx-build -b xml . _build
          sphinx-to-sqlite ../docs.db _build
          cd ..
      - id: auth
        name: Authenticate to Google Cloud
        uses: google-github-actions/auth@v2
      - name: Set up Cloud Run
        uses: google-github-actions/setup-gcloud@v0
        with:
          credentials_json: ${{ secrets.GCP_SA_KEY }}
      - name: Set up Cloud SDK
        uses: google-github-actions/setup-gcloud@v3
          version: '275.0.0'
          service_account_email: ${{ secrets.GCP_SA_EMAIL }}
          service_account_key: ${{ secrets.GCP_SA_KEY }}
      - name: Deploy stable-docs.datasette.io to Cloud Run
        run: |-
          gcloud config set run/region us-central1

@@ -92,7 +105,7 @@ jobs:
    needs: [deploy]
    if: "!github.event.release.prerelease"
    steps:
      - uses: actions/checkout@v4
      - uses: actions/checkout@v2
      - name: Build and push to Docker Hub
        env:
          DOCKER_USER: ${{ secrets.DOCKER_USER }}
.github/workflows/spellcheck.yml (vendored, 19 changes)

@@ -9,19 +9,22 @@ jobs:
  spellcheck:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v6
      - uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: '3.11'
          cache: 'pip'
          cache-dependency-path: '**/pyproject.toml'
          python-version: 3.9
      - uses: actions/cache@v2
        name: Configure pip caching
        with:
          path: ~/.cache/pip
          key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
          restore-keys: |
            ${{ runner.os }}-pip-
      - name: Install dependencies
        run: |
          pip install -e '.[docs]'
      - name: Check spelling
        run: |
          codespell README.md --ignore-words docs/codespell-ignore-words.txt
          codespell docs/*.rst --ignore-words docs/codespell-ignore-words.txt
          codespell datasette -S datasette/static --ignore-words docs/codespell-ignore-words.txt
          codespell tests --ignore-words docs/codespell-ignore-words.txt
.github/workflows/stable-docs.yml (vendored, 76 changes)

@@ -1,76 +0,0 @@
name: Update Stable Docs

on:
  release:
    types: [published]
  push:
    branches:
      - main

permissions:
  contents: write

jobs:
  update_stable_docs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v5
        with:
          fetch-depth: 0  # We need all commits to find docs/ changes
      - name: Set up Git user
        run: |
          git config user.name "Automated"
          git config user.email "actions@users.noreply.github.com"
      - name: Create stable branch if it does not yet exist
        run: |
          if ! git ls-remote --heads origin stable | grep -qE '\bstable\b'; then
            # Make sure we have all tags locally
            git fetch --tags --quiet

            # Latest tag that is just numbers and dots (optionally prefixed with 'v')
            # e.g., 0.65.2 or v0.65.2 — excludes 1.0a20, 1.0-rc1, etc.
            LATEST_RELEASE=$(
              git tag -l --sort=-v:refname \
                | grep -E '^v?[0-9]+(\.[0-9]+){1,3}$' \
                | head -n1
            )

            git checkout -b stable

            # If there are any stable releases, copy docs/ from the most recent
            if [ -n "$LATEST_RELEASE" ]; then
              rm -rf docs/
              git checkout "$LATEST_RELEASE" -- docs/ || true
            fi

            git commit -m "Populate docs/ from $LATEST_RELEASE" || echo "No changes"
            git push -u origin stable
          fi
      - name: Handle Release
        if: github.event_name == 'release' && !github.event.release.prerelease
        run: |
          git fetch --all
          git checkout stable
          git reset --hard ${GITHUB_REF#refs/tags/}
          git push origin stable --force
      - name: Handle Commit to Main
        if: contains(github.event.head_commit.message, '!stable-docs')
        run: |
          git fetch origin
          git checkout -b stable origin/stable
          # Get the list of modified files in docs/ from the current commit
          FILES=$(git diff-tree --no-commit-id --name-only -r ${{ github.sha }} -- docs/)
          # Check if the list of files is non-empty
          if [[ -n "$FILES" ]]; then
            # Checkout those files to the stable branch to over-write with their contents
            for FILE in $FILES; do
              git checkout ${{ github.sha }} -- $FILE
            done
            git add docs/
            git commit -m "Doc changes from ${{ github.sha }}"
            git push origin stable
          else
            echo "No changes to docs/ in this commit."
            exit 0
          fi
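The LATEST_RELEASE shell pipeline in the deleted workflow above filters tags down to plain stable releases. The same filter, sketched in Python (the tag names are illustrative):

```python
# Keep only plain x.y[.z] tags, optionally 'v'-prefixed; pre-releases
# such as 1.0a20 or 1.0-rc1 are excluded, mirroring the grep -E above.
import re

tags = ["1.0a20", "0.65.2", "v0.64.1", "1.0-rc1"]  # illustrative tag names
stable = [t for t in tags if re.fullmatch(r"v?[0-9]+(\.[0-9]+){1,3}", t)]
latest = max(stable, key=lambda t: tuple(int(p) for p in t.lstrip("v").split(".")))
print(latest)  # 0.65.2
```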
.github/workflows/test-coverage.yml (vendored, 17 changes)

@@ -15,13 +15,18 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Check out datasette
        uses: actions/checkout@v4
        uses: actions/checkout@v2
      - name: Set up Python
        uses: actions/setup-python@v6
        uses: actions/setup-python@v2
        with:
          python-version: '3.12'
          cache: 'pip'
          cache-dependency-path: '**/pyproject.toml'
          python-version: 3.9
      - uses: actions/cache@v2
        name: Configure pip caching
        with:
          path: ~/.cache/pip
          key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
          restore-keys: |
            ${{ runner.os }}-pip-
      - name: Install Python dependencies
        run: |
          python -m pip install --upgrade pip

@@ -31,7 +36,7 @@ jobs:
        run: |-
          ls -lah
          cat .coveragerc
          pytest -m "not serial" --cov=datasette --cov-config=.coveragerc --cov-report xml:coverage.xml --cov-report term -x
          pytest --cov=datasette --cov-config=.coveragerc --cov-report xml:coverage.xml --cov-report term
          ls -lah
      - name: Upload coverage report
        uses: codecov/codecov-action@v1
.github/workflows/test-pyodide.yml (vendored, 8 changes)

@@ -12,15 +12,15 @@ jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/checkout@v3
      - name: Set up Python 3.10
        uses: actions/setup-python@v6
        uses: actions/setup-python@v3
        with:
          python-version: "3.10"
          cache: 'pip'
          cache-dependency-path: '**/pyproject.toml'
          cache-dependency-path: '**/setup.py'
      - name: Cache Playwright browsers
        uses: actions/cache@v4
        uses: actions/cache@v2
        with:
          path: ~/.cache/ms-playwright/
          key: ${{ runner.os }}-browsers
.github/workflows/test-sqlite-support.yml (vendored, 53 changes)

@@ -1,53 +0,0 @@
name: Test SQLite versions

on: [push, pull_request]

permissions:
  contents: read

jobs:
  test:
    runs-on: ${{ matrix.platform }}
    continue-on-error: true
    strategy:
      matrix:
        platform: [ubuntu-latest]
        python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"]
        sqlite-version: [
          #"3", # latest version
          "3.46",
          #"3.45",
          #"3.27",
          #"3.26",
          "3.25",
          #"3.25.3", # 2018-09-25, window functions breaks test_upsert for some reason on 3.10, skip for now
          #"3.24", # 2018-06-04, added UPSERT support
          #"3.23.1" # 2018-04-10, before UPSERT
        ]
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.python-version }}
          allow-prereleases: true
          cache: pip
          cache-dependency-path: pyproject.toml
      - name: Set up SQLite ${{ matrix.sqlite-version }}
        uses: asg017/sqlite-versions@71ea0de37ae739c33e447af91ba71dda8fcf22e6
        with:
          version: ${{ matrix.sqlite-version }}
          cflags: "-DSQLITE_ENABLE_DESERIALIZE -DSQLITE_ENABLE_FTS5 -DSQLITE_ENABLE_FTS4 -DSQLITE_ENABLE_FTS3_PARENTHESIS -DSQLITE_ENABLE_RTREE -DSQLITE_ENABLE_JSON1"
      - run: python3 -c "import sqlite3; print(sqlite3.sqlite_version)"
      - run: echo $LD_LIBRARY_PATH
      - name: Build extension for --load-extension test
        run: |-
          (cd tests && gcc ext.c -fPIC -shared -o ext.so)
      - name: Install dependencies
        run: |
          pip install -e '.[test]'
          pip freeze
      - name: Run tests
        run: |
          pytest -n auto -m "not serial"
          pytest -m "serial"
.github/workflows/test.yml (vendored, 27 changes)

@@ -10,16 +10,20 @@ jobs:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"]
        python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"]
    steps:
      - uses: actions/checkout@v4
      - uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v6
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
          allow-prereleases: true
          cache: pip
          cache-dependency-path: pyproject.toml
      - uses: actions/cache@v3
        name: Configure pip caching
        with:
          path: ~/.cache/pip
          key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
          restore-keys: |
            ${{ runner.os }}-pip-
      - name: Build extension for --load-extension test
        run: |-
          (cd tests && gcc ext.c -fPIC -shared -o ext.so)

@@ -31,13 +35,6 @@ jobs:
        run: |
          pytest -n auto -m "not serial"
          pytest -m "serial"
          # And the test that exercises a localhost HTTPS server
          tests/test_datasette_https_server.sh
      - name: Install docs dependencies
        run: |
          pip install -e '.[docs]'
      - name: Black
        run: black --check .
      - name: Check if cog needs to be run
        run: |
          cog --check docs/*.rst

@@ -45,7 +42,3 @@ jobs:
        run: |
          # This fails on syntax errors, or a diff was applied
          blacken-docs -l 60 docs/*.rst
      - name: Test DATASETTE_LOAD_PLUGINS
        run: |
          pip install datasette-init datasette-json-html
          tests/test-datasette-load-plugins.sh
.github/workflows/tmate.yml (vendored, 3 changes)

@@ -5,7 +5,6 @@ on:
permissions:
  contents: read
  models: read

jobs:
  build:

@@ -14,5 +13,3 @@ jobs:
      - uses: actions/checkout@v2
      - name: Setup tmate session
        uses: mxschmitt/action-tmate@v3
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.gitignore (vendored, 5 changes)

@@ -5,9 +5,6 @@ scratchpad
.vscode

uv.lock
data.db

# We don't use Pipfile, so ignore them
Pipfile
Pipfile.lock

@@ -126,4 +123,4 @@ node_modules
# include it in source control.
tests/*.dylib
tests/*.so
tests/*.dll
tests/*.dll
.readthedocs.yaml

@@ -3,7 +3,7 @@ version: 2
build:
  os: ubuntu-20.04
  tools:
    python: "3.11"
    python: "3.9"

sphinx:
  configuration: docs/conf.py
Justfile (56 changes)

@@ -1,56 +0,0 @@
export DATASETTE_SECRET := "not_a_secret"

# Run tests and linters
@default: test lint

# Setup project
@init:
    uv sync --extra test --extra docs

# Run pytest with supplied options
@test *options: init
    uv run pytest -n auto {{options}}

@codespell:
    uv run codespell README.md --ignore-words docs/codespell-ignore-words.txt
    uv run codespell docs/*.rst --ignore-words docs/codespell-ignore-words.txt
    uv run codespell datasette -S datasette/static --ignore-words docs/codespell-ignore-words.txt
    uv run codespell tests --ignore-words docs/codespell-ignore-words.txt

# Run linters: black, flake8, mypy, cog
@lint: codespell
    uv run black . --check
    uv run flake8
    uv run --extra test cog --check README.md docs/*.rst

# Rebuild docs with cog
@cog:
    uv run --extra test cog -r README.md docs/*.rst

# Serve live docs on localhost:8000
@docs: cog blacken-docs
    uv run --extra docs make -C docs livehtml

# Build docs as static HTML
@docs-build: cog blacken-docs
    rm -rf docs/_build && cd docs && uv run make html

# Apply Black
@black:
    uv run black .

# Apply blacken-docs
@blacken-docs:
    uv run blacken-docs -l 60 docs/*.rst

# Apply prettier
@prettier:
    npm run fix

# Format code with both black and prettier
@format: black prettier blacken-docs

@serve *options:
    uv run sqlite-utils create-database data.db
    uv run sqlite-utils create-table data.db docs id integer title text --pk id --ignore
    uv run python -m datasette data.db --root --reload {{options}}
README.md (10 changes)

@@ -1,13 +1,13 @@
<img src="https://datasette.io/static/datasette-logo.svg" alt="Datasette">

[](https://pypi.org/project/datasette/)
[](https://docs.datasette.io/en/latest/changelog.html)
[](https://docs.datasette.io/en/stable/changelog.html)
[](https://pypi.org/project/datasette/)
[](https://github.com/simonw/datasette/actions?query=workflow%3ATest)
[](https://docs.datasette.io/en/latest/?badge=latest)
[](https://github.com/simonw/datasette/blob/main/LICENSE)
[](https://hub.docker.com/r/datasetteproject/datasette)
[](https://datasette.io/discord)
[](https://discord.gg/ktd74dm5mw)

*An open source multi-tool for exploring and publishing data*

@@ -15,14 +15,14 @@ Datasette is a tool for exploring and publishing data. It helps people take data

Datasette is aimed at data journalists, museum curators, archivists, local governments, scientists, researchers and anyone else who has data that they wish to share with the world.

[Explore a demo](https://datasette.io/global-power-plants/global-power-plants), watch [a video about the project](https://simonwillison.net/2021/Feb/7/video/) or try it out [on GitHub Codespaces](https://github.com/datasette/datasette-studio).
[Explore a demo](https://global-power-plants.datasettes.com/global-power-plants/global-power-plants), watch [a video about the project](https://simonwillison.net/2021/Feb/7/video/) or try it out by [uploading and publishing your own CSV data](https://docs.datasette.io/en/stable/getting_started.html#try-datasette-without-installing-anything-using-glitch).

* [datasette.io](https://datasette.io/) is the official project website
* Latest [Datasette News](https://datasette.io/news)
* Comprehensive documentation: https://docs.datasette.io/
* Examples: https://datasette.io/examples
* Live demo of current `main` branch: https://latest.datasette.io/
* Questions, feedback or want to talk about the project? Join our [Discord](https://datasette.io/discord)
* Questions, feedback or want to talk about the project? Join our [Discord](https://discord.gg/ktd74dm5mw)

Want to stay up-to-date with the project? Subscribe to the [Datasette newsletter](https://datasette.substack.com/) for tips, tricks and news on what's new in the Datasette ecosystem.

@@ -36,7 +36,7 @@ You can also install it using `pip` or `pipx`:

    pip install datasette

Datasette requires Python 3.8 or higher. We also have [detailed installation instructions](https://docs.datasette.io/en/stable/installation.html) covering other options such as Docker.
Datasette requires Python 3.7 or higher. We also have [detailed installation instructions](https://docs.datasette.io/en/stable/installation.html) covering other options such as Docker.

## Basic usage
datasette/__init__.py

@@ -1,8 +1,5 @@
from datasette.permissions import Permission  # noqa
from datasette.version import __version_info__, __version__  # noqa
from datasette.events import Event  # noqa
from datasette.utils.asgi import Forbidden, NotFound, Request, Response  # noqa
from datasette.utils import actor_matches_allow  # noqa
from datasette.views import Context  # noqa
from .hookspecs import hookimpl  # noqa
from .hookspecs import hookspec  # noqa
datasette/app.py (1454 changes)
File diff suppressed because it is too large.
datasette/cli.py (445 changes)

@@ -4,17 +4,16 @@ import click
from click import formatting
from click.types import CompositeParamType
from click_default_group import DefaultGroup
import functools
import json
import os
import pathlib
from runpy import run_module
import shutil
from subprocess import call
import sys
import textwrap
from runpy import run_module
import webbrowser
from .app import (
    OBSOLETE_SETTINGS,
    Datasette,
    DEFAULT_SETTINGS,
    SETTINGS,

@@ -25,13 +24,11 @@ from .utils import (
    LoadExtension,
    StartupError,
    check_connection,
    deep_dict_update,
    find_spatialite,
    parse_metadata,
    ConnectionProblem,
    SpatialiteConnectionProblem,
    initial_path_for_datasette,
    pairs_to_nested_config,
    temporary_docker_directory,
    value_as_boolean,
    SpatialiteNotFound,

@@ -42,18 +39,6 @@ from .utils.sqlite import sqlite3
from .utils.testing import TestClient
from .version import __version__


def run_sync(coro_func):
    """Run an async callable to completion on a fresh event loop."""
    loop = asyncio.new_event_loop()
    try:
        asyncio.set_event_loop(loop)
        return loop.run_until_complete(coro_func())
    finally:
        asyncio.set_event_loop(None)
        loop.close()


# Use Rich for tracebacks if it is installed
try:
    from rich.traceback import install
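The run_sync() helper added above replaces the asyncio.get_event_loop().run_until_complete() calls that appear on the other side of this diff. A standalone sketch of why the fresh-loop pattern matters (the example coroutine is hypothetical):

```python
import asyncio


def run_sync(coro_func):
    """Run an async callable to completion on a fresh event loop."""
    loop = asyncio.new_event_loop()
    try:
        asyncio.set_event_loop(loop)
        return loop.run_until_complete(coro_func())
    finally:
        asyncio.set_event_loop(None)
        loop.close()


async def greet():
    return "hello"


# Each call creates and closes its own loop, so repeated synchronous
# entry points (inspect, serve --get, create-token) cannot trip over a
# previously closed loop the way get_event_loop() can.
print(run_sync(greet))
print(run_sync(greet))
```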
@@ -63,65 +48,93 @@ except ImportError:
    pass


class Config(click.ParamType):
    # This will be removed in Datasette 1.0 in favour of class Setting
    name = "config"

    def convert(self, config, param, ctx):
        if ":" not in config:
            self.fail(f'"{config}" should be name:value', param, ctx)
            return
        name, value = config.split(":", 1)
        if name not in DEFAULT_SETTINGS:
            msg = (
                OBSOLETE_SETTINGS.get(name)
                or f"{name} is not a valid option (--help-settings to see all)"
            )
            self.fail(
                msg,
                param,
                ctx,
            )
            return
        # Type checking
        default = DEFAULT_SETTINGS[name]
        if isinstance(default, bool):
            try:
                return name, value_as_boolean(value)
            except ValueAsBooleanError:
                self.fail(f'"{name}" should be on/off/true/false/1/0', param, ctx)
                return
        elif isinstance(default, int):
            if not value.isdigit():
                self.fail(f'"{name}" should be an integer', param, ctx)
                return
            return name, int(value)
        elif isinstance(default, str):
            return name, value
        else:
            # Should never happen:
            self.fail("Invalid option")


class Setting(CompositeParamType):
    name = "setting"
    arity = 2

    def convert(self, config, param, ctx):
        name, value = config
        if name in DEFAULT_SETTINGS:
            # For backwards compatibility with how this worked prior to
            # Datasette 1.0, we turn bare setting names into setting.name
            # Type checking for those older settings
            default = DEFAULT_SETTINGS[name]
            name = "settings.{}".format(name)
            if isinstance(default, bool):
                try:
                    return name, "true" if value_as_boolean(value) else "false"
                except ValueAsBooleanError:
                    self.fail(f'"{name}" should be on/off/true/false/1/0', param, ctx)
            elif isinstance(default, int):
                if not value.isdigit():
                    self.fail(f'"{name}" should be an integer', param, ctx)
                return name, value
            elif isinstance(default, str):
                return name, value
            else:
                # Should never happen:
                self.fail("Invalid option")
            return name, value
        if name not in DEFAULT_SETTINGS:
            msg = (
                OBSOLETE_SETTINGS.get(name)
                or f"{name} is not a valid option (--help-settings to see all)"
            )
            self.fail(
                msg,
                param,
                ctx,
            )
            return
        # Type checking
        default = DEFAULT_SETTINGS[name]
        if isinstance(default, bool):
            try:
                return name, value_as_boolean(value)
            except ValueAsBooleanError:
                self.fail(f'"{name}" should be on/off/true/false/1/0', param, ctx)
                return
        elif isinstance(default, int):
            if not value.isdigit():
                self.fail(f'"{name}" should be an integer', param, ctx)
                return
            return name, int(value)
        elif isinstance(default, str):
            return name, value
        else:
            # Should never happen:
            self.fail("Invalid option")
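A condensed sketch of the backwards-compatibility shim in Setting.convert() above. DEFAULT_SETTINGS here is a two-entry stand-in for the real table; value_as_boolean is the actual datasette.utils helper:

```python
# Bare pre-1.0 setting names are rewritten to their nested settings.*
# form, with type handling driven by the default value's type.
from datasette.utils import value_as_boolean

DEFAULT_SETTINGS = {"default_page_size": 100, "suggest_facets": True}  # stand-in


def convert(name, value):
    if name in DEFAULT_SETTINGS:
        default = DEFAULT_SETTINGS[name]
        name = "settings.{}".format(name)
        if isinstance(default, bool):
            return name, "true" if value_as_boolean(value) else "false"
    return name, value


print(convert("default_page_size", "50"))  # ('settings.default_page_size', '50')
print(convert("suggest_facets", "off"))    # ('settings.suggest_facets', 'false')
```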
def sqlite_extensions(fn):
    fn = click.option(
    return click.option(
        "sqlite_extensions",
        "--load-extension",
        type=LoadExtension(),
        envvar="DATASETTE_LOAD_EXTENSION",
        envvar="SQLITE_EXTENSIONS",
        multiple=True,
        help="Path to a SQLite extension to load, and optional entrypoint",
    )(fn)

    # Wrap it in a custom error handler
    @functools.wraps(fn)
    def wrapped(*args, **kwargs):
        try:
            return fn(*args, **kwargs)
        except AttributeError as e:
            if "enable_load_extension" in str(e):
                raise click.ClickException(
                    textwrap.dedent(
                        """
                        Your Python installation does not have the ability to load SQLite extensions.

                        More information: https://datasette.io/help/extensions
                        """
                    ).strip()
                )
            raise

    return wrapped


@click.group(cls=DefaultGroup, default="serve", default_if_no_args=True)
@click.version_option(version=__version__)
@@ -146,7 +159,9 @@ def inspect(files, inspect_file, sqlite_extensions):
    This can then be passed to "datasette --inspect-file" to speed up count
    operations against immutable database files.
    """
    inspect_data = run_sync(lambda: inspect_(files, sqlite_extensions))
    app = Datasette([], immutables=files, sqlite_extensions=sqlite_extensions)
    loop = asyncio.get_event_loop()
    inspect_data = loop.run_until_complete(inspect_(files, sqlite_extensions))
    if inspect_file == "-":
        sys.stdout.write(json.dumps(inspect_data, indent=2))
    else:

@@ -158,6 +173,9 @@ async def inspect_(files, sqlite_extensions):
    app = Datasette([], immutables=files, sqlite_extensions=sqlite_extensions)
    data = {}
    for name, database in app.databases.items():
        if name == "_internal":
            # Don't include the in-memory _internal database
            continue
        counts = await database.table_counts(limit=3600 * 1000)
        data[name] = {
            "hash": database.hash,

@@ -183,23 +201,15 @@ pm.hook.publish_subcommand(publish=publish)

@cli.command()
@click.option("--all", help="Include built-in default plugins", is_flag=True)
@click.option(
    "--requirements", help="Output requirements.txt of installed plugins", is_flag=True
)
@click.option(
    "--plugins-dir",
    type=click.Path(exists=True, file_okay=False, dir_okay=True),
    help="Path to directory containing custom plugins",
)
def plugins(all, requirements, plugins_dir):
def plugins(all, plugins_dir):
    """List currently installed plugins"""
    app = Datasette([], plugins_dir=plugins_dir)
    if requirements:
        for plugin in app._plugins():
            if plugin["version"]:
                click.echo("{}=={}".format(plugin["name"], plugin["version"]))
    else:
        click.echo(json.dumps(app._plugins(all=all), indent=4))
    click.echo(json.dumps(app._plugins(all=all), indent=4))


@cli.command()
@@ -309,32 +319,15 @@ def package(


@cli.command()
@click.argument("packages", nargs=-1)
@click.argument("packages", nargs=-1, required=True)
@click.option(
    "-U", "--upgrade", is_flag=True, help="Upgrade packages to latest version"
)
@click.option(
    "-r",
    "--requirement",
    type=click.Path(exists=True),
    help="Install from requirements file",
)
@click.option(
    "-e",
    "--editable",
    help="Install a project in editable mode from this path",
)
def install(packages, upgrade, requirement, editable):
def install(packages, upgrade):
    """Install plugins and packages from PyPI into the same environment as Datasette"""
    if not packages and not requirement and not editable:
        raise click.UsageError("Please specify at least one package to install")
    args = ["pip", "install"]
    if upgrade:
        args += ["--upgrade"]
    if editable:
        args += ["--editable", editable]
    if requirement:
        args += ["-r", requirement]
    args += list(packages)
    sys.argv = args
    run_module("pip", run_name="__main__")
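The install command above shells into pip in-process via runpy rather than spawning a subprocess, which guarantees packages land in the exact environment running Datasette. The same pattern in isolation (the package name is illustrative):

```python
# Equivalent of `datasette install -U some-plugin`: fake pip's argv and
# execute its __main__ module inside the current interpreter.
import sys
from runpy import run_module

sys.argv = ["pip", "install", "--upgrade", "datasette-vega"]  # illustrative package
run_module("pip", run_name="__main__")
```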
@@ -415,17 +408,16 @@ def uninstall(packages, yes):
)
@click.option("--memory", is_flag=True, help="Make /_memory database available")
@click.option(
    "-c",
    "--config",
    type=click.File(mode="r"),
    help="Path to JSON/YAML Datasette configuration file",
    type=Config(),
    help="Deprecated: set config option using configname:value. Use --setting instead.",
    multiple=True,
)
@click.option(
    "-s",
    "--setting",
    "settings",
    type=Setting(),
    help="nested.key, value setting to use in Datasette configuration",
    help="Setting, see docs.datasette.io/en/stable/settings.html",
    multiple=True,
)
@click.option(

@@ -438,28 +430,10 @@ def uninstall(packages, yes):
    help="Output URL that sets a cookie authenticating the root user",
    is_flag=True,
)
@click.option(
    "--default-deny",
    help="Deny all permissions by default",
    is_flag=True,
)
@click.option(
    "--get",
    help="Run an HTTP GET request against this path, print results and exit",
)
@click.option(
    "--headers",
    is_flag=True,
    help="Include HTTP headers in --get output",
)
@click.option(
    "--token",
    help="API token to send with --get requests",
)
@click.option(
    "--actor",
    help="Actor to use for --get requests (JSON string)",
)
@click.option("--version-note", help="Additional note to show on /-/versions")
@click.option("--help-settings", is_flag=True, help="Show available settings")
@click.option("--pdb", is_flag=True, help="Launch debugger on any errors")

@@ -488,17 +462,10 @@ def uninstall(packages, yes):
@click.option(
    "--ssl-keyfile",
    help="SSL key file",
    envvar="DATASETTE_SSL_KEYFILE",
)
@click.option(
    "--ssl-certfile",
    help="SSL certificate file",
    envvar="DATASETTE_SSL_CERTFILE",
)
@click.option(
    "--internal",
    type=click.Path(),
    help="Path to a persistent Datasette internal SQLite database",
)
def serve(
    files,

@@ -519,11 +486,7 @@ def serve(
    settings,
    secret,
    root,
    default_deny,
    get,
    headers,
    token,
    actor,
    version_note,
    help_settings,
    pdb,

@@ -533,7 +496,6 @@ def serve(
    nolock,
    ssl_keyfile,
    ssl_certfile,
    internal,
    return_instance=False,
):
    """Serve up specified SQLite database files with a web UI"""

@@ -554,8 +516,6 @@ def serve(
        reloader = hupper.start_reloader("datasette.cli.serve")
        if immutable:
            reloader.watch_files(immutable)
        if config:
            reloader.watch_files([config.name])
        if metadata:
            reloader.watch_files([metadata.name])
@@ -568,55 +528,41 @@ def serve(
    if metadata:
        metadata_data = parse_metadata(metadata.read())

    config_data = None
    combined_settings = {}
    if config:
        config_data = parse_metadata(config.read())

    config_data = config_data or {}

    # Merge in settings from -s/--setting
    if settings:
        settings_updates = pairs_to_nested_config(settings)
        # Merge recursively, to avoid over-writing nested values
        # https://github.com/simonw/datasette/issues/2389
        deep_dict_update(config_data, settings_updates)
        click.echo(
            "--config name:value will be deprecated in Datasette 1.0, use --setting name value instead",
            err=True,
        )
    combined_settings.update(config)
    combined_settings.update(settings)

    kwargs = dict(
        immutables=immutable,
        cache_headers=not reload,
        cors=cors,
        inspect_data=inspect_data,
        config=config_data,
        metadata=metadata_data,
        sqlite_extensions=sqlite_extensions,
        template_dir=template_dir,
        plugins_dir=plugins_dir,
        static_mounts=static,
        settings=None,  # These are passed in config= now
        settings=combined_settings,
        memory=memory,
        secret=secret,
        version_note=version_note,
        pdb=pdb,
        crossdb=crossdb,
        nolock=nolock,
        internal=internal,
        default_deny=default_deny,
    )

    # Separate directories from files
    directories = [f for f in files if os.path.isdir(f)]
    file_paths = [f for f in files if not os.path.isdir(f)]

    # Handle config_dir - only one directory allowed
    if len(directories) > 1:
        raise click.ClickException(
            "Cannot pass multiple directories. Pass a single directory as config_dir."
        )
    elif len(directories) == 1:
        kwargs["config_dir"] = pathlib.Path(directories[0])
    # if files is a single directory, use that as config_dir=
    if 1 == len(files) and os.path.isdir(files[0]):
        kwargs["config_dir"] = pathlib.Path(files[0])
        files = []

    # Verify list of files, create if needed (and --create)
    for file in file_paths:
    for file in files:
        if not pathlib.Path(file).exists():
            if create:
                sqlite3.connect(file).execute("vacuum")

@@ -627,32 +573,8 @@ def serve(
            )
        )

    # Check for duplicate files by resolving all paths to their absolute forms
    # Collect all database files that will be loaded (explicit files + config_dir files)
    all_db_files = []

    # Add explicit files
    for file in file_paths:
        all_db_files.append((file, pathlib.Path(file).resolve()))

    # Add config_dir databases if config_dir is set
    if "config_dir" in kwargs:
        config_dir = kwargs["config_dir"]
        for ext in ("db", "sqlite", "sqlite3"):
            for db_file in config_dir.glob(f"*.{ext}"):
                all_db_files.append((str(db_file), db_file.resolve()))

    # Check for duplicates
    seen = {}
    for original_path, resolved_path in all_db_files:
        if resolved_path in seen:
            raise click.ClickException(
                f"Duplicate database file: '{original_path}' and '{seen[resolved_path]}' "
                f"both refer to {resolved_path}"
            )
        seen[resolved_path] = original_path

    files = file_paths
    # De-duplicate files so 'datasette db.db db.db' only attaches one /db
    files = list(dict.fromkeys(files))

    try:
        ds = Datasette(files, **kwargs)

@@ -666,38 +588,15 @@ def serve(
        return ds

    # Run the "startup" plugin hooks
    run_sync(ds.invoke_startup)
    asyncio.get_event_loop().run_until_complete(ds.invoke_startup())

    # Run async soundness checks - but only if we're not under pytest
    run_sync(lambda: check_databases(ds))

    if headers and not get:
        raise click.ClickException("--headers can only be used with --get")

    if token and not get:
        raise click.ClickException("--token can only be used with --get")
    asyncio.get_event_loop().run_until_complete(check_databases(ds))

    if get:
        client = TestClient(ds)
        request_headers = {}
        if token:
            request_headers["Authorization"] = "Bearer {}".format(token)
        cookies = {}
        if actor:
            cookies["ds_actor"] = client.actor_cookie(json.loads(actor))
        response = client.get(get, headers=request_headers, cookies=cookies)

        if headers:
            # Output HTTP status code, headers, two newlines, then the response body
            click.echo(f"HTTP/1.1 {response.status}")
            for key, value in response.headers.items():
                click.echo(f"{key}: {value}")
            if response.text:
                click.echo()
                click.echo(response.text)
        else:
            click.echo(response.text)

        response = client.get(get)
        click.echo(response.text)
        exit_code = 0 if response.status == 200 else 1
        sys.exit(exit_code)
        return

@@ -705,15 +604,16 @@ def serve(
    # Start the server
    url = None
    if root:
        ds.root_enabled = True
        url = "http://{}:{}{}?token={}".format(
            host, port, ds.urls.path("-/auth-token"), ds._root_token
        )
        click.echo(url)
        print(url)
    if open_browser:
        if url is None:
            # Figure out most convenient URL - to table, database or homepage
            path = run_sync(lambda: initial_path_for_datasette(ds))
            path = asyncio.get_event_loop().run_until_complete(
                initial_path_for_datasette(ds)
            )
            url = f"http://{host}:{port}{path}"
        webbrowser.open(url)
    uvicorn_kwargs = dict(
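The duplicate-file check added earlier in serve() above hinges on pathlib.Path.resolve(), which normalizes relative segments and symlinks so two spellings of one file collide on the same key. A standalone sketch:

```python
import pathlib


def find_duplicates(paths):
    """Yield (path, earlier_path) pairs that resolve to the same file."""
    seen = {}
    for original in paths:
        resolved = pathlib.Path(original).resolve()
        if resolved in seen:
            yield original, seen[resolved]
        else:
            seen[resolved] = original


# './data.db' and 'data.db' resolve identically, so the second is flagged.
print(list(find_duplicates(["data.db", "./data.db"])))
```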
@@ -728,131 +628,6 @@ def serve(
    uvicorn.run(ds.app(), **uvicorn_kwargs)


@cli.command()
@click.argument("id")
@click.option(
    "--secret",
    help="Secret used for signing the API tokens",
    envvar="DATASETTE_SECRET",
    required=True,
)
@click.option(
    "-e",
    "--expires-after",
    help="Token should expire after this many seconds",
    type=int,
)
@click.option(
    "alls",
    "-a",
    "--all",
    type=str,
    metavar="ACTION",
    multiple=True,
    help="Restrict token to this action",
)
@click.option(
    "databases",
    "-d",
    "--database",
    type=(str, str),
    metavar="DB ACTION",
    multiple=True,
    help="Restrict token to this action on this database",
)
@click.option(
    "resources",
    "-r",
    "--resource",
    type=(str, str, str),
    metavar="DB RESOURCE ACTION",
    multiple=True,
    help="Restrict token to this action on this database resource (a table, SQL view or named query)",
)
@click.option(
    "--debug",
    help="Show decoded token",
    is_flag=True,
)
@click.option(
    "--plugins-dir",
    type=click.Path(exists=True, file_okay=False, dir_okay=True),
    help="Path to directory containing custom plugins",
)
def create_token(
    id, secret, expires_after, alls, databases, resources, debug, plugins_dir
):
    """
    Create a signed API token for the specified actor ID

    Example:

        datasette create-token root --secret mysecret

    To allow only "view-database-download" for all databases:

    \b
        datasette create-token root --secret mysecret \\
            --all view-database-download

    To allow "create-table" against a specific database:

    \b
        datasette create-token root --secret mysecret \\
            --database mydb create-table

    To allow "insert-row" against a specific table:

    \b
        datasette create-token root --secret mysecret \\
            --resource mydb mytable insert-row

    Restricted actions can be specified multiple times using
    multiple --all, --database, and --resource options.

    Add --debug to see a decoded version of the token.
    """
    ds = Datasette(secret=secret, plugins_dir=plugins_dir)

    # Run ds.invoke_startup() in an event loop
    run_sync(ds.invoke_startup)

    # Warn about any unknown actions
    actions = []
    actions.extend(alls)
    actions.extend([p[1] for p in databases])
    actions.extend([p[2] for p in resources])
    for action in actions:
        if not ds.actions.get(action):
            click.secho(
                f"  Unknown permission: {action} ",
                fg="red",
                err=True,
            )

    restrict_database = {}
    for database, action in databases:
        restrict_database.setdefault(database, []).append(action)
    restrict_resource = {}
    for database, resource, action in resources:
        restrict_resource.setdefault(database, {}).setdefault(resource, []).append(
            action
        )

    token = ds.create_token(
        id,
        expires_after=expires_after,
        restrict_all=alls,
        restrict_database=restrict_database,
        restrict_resource=restrict_resource,
    )
    click.echo(token)
    if debug:
        encoded = token[len("dstok_") :]
        click.echo("\nDecoded:\n")
        click.echo(json.dumps(ds.unsign(encoded, namespace="token"), indent=2))


pm.hook.register_commands(cli=cli)
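What --debug prints in create_token above is simply the signed payload run back through ds.unsign(). A minimal sketch, assuming the same internal APIs (the secret is illustrative, and the payload's abbreviated keys are internal details):

```python
# dstok_ tokens are signed JSON payloads; stripping the prefix and
# unsigning in the "token" namespace recovers the original dict.
from datasette.app import Datasette

ds = Datasette(secret="mysecret")  # illustrative secret
token = ds.create_token("root", expires_after=3600)
assert token.startswith("dstok_")
print(ds.unsign(token[len("dstok_"):], namespace="token"))
```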
datasette/database.py

@@ -3,7 +3,6 @@ from collections import namedtuple
from pathlib import Path
import janus
import queue
import sqlite_utils
import sys
import threading
import uuid

@@ -15,13 +14,11 @@ from .utils import (
    detect_spatialite,
    get_all_foreign_keys,
    get_outbound_foreign_keys,
    md5_not_usedforsecurity,
    sqlite_timelimit,
    sqlite3,
    table_columns,
    table_column_details,
)
from .utils.sqlite import sqlite_version
from .inspect import inspect_hash

connections = threading.local()

@@ -30,22 +27,10 @@ AttachedDatabase = namedtuple("AttachedDatabase", ("seq", "name", "file"))


class Database:
    # For table counts stop at this many rows:
    count_limit = 10000
    _thread_local_id_counter = 1

    def __init__(
        self,
        ds,
        path=None,
        is_mutable=True,
        is_memory=False,
        memory_name=None,
        mode=None,
        self, ds, path=None, is_mutable=True, is_memory=False, memory_name=None
    ):
        self.name = None
        self._thread_local_id = f"x{self._thread_local_id_counter}"
        Database._thread_local_id_counter += 1
        self.route = None
        self.ds = ds
        self.path = path

@@ -64,7 +49,6 @@ class Database:
        self._write_connection = None
        # This is used to track all file connections so they can be closed
        self._all_file_connections = []
        self.mode = mode

    @property
    def cached_table_counts(self):

@@ -78,12 +62,6 @@ class Database:
        }
        return self._cached_table_counts

    @property
    def color(self):
        if self.hash:
            return self.hash[:6]
        return md5_not_usedforsecurity(self.name)[:6]

    def suggest_name(self):
        if self.path:
            return Path(self.path).stem

@@ -93,20 +71,18 @@ class Database:
        return "db"

    def connect(self, write=False):
        extra_kwargs = {}
        if write:
            extra_kwargs["isolation_level"] = "IMMEDIATE"
        if self.memory_name:
            uri = "file:{}?mode=memory&cache=shared".format(self.memory_name)
            conn = sqlite3.connect(
                uri, uri=True, check_same_thread=False, **extra_kwargs
                uri,
                uri=True,
                check_same_thread=False,
            )
            if not write:
                conn.execute("PRAGMA query_only=1")
            return conn
        if self.is_memory:
            return sqlite3.connect(":memory:", uri=True)

        # mode=ro or immutable=1?
        if self.is_mutable:
            qs = "?mode=ro"
@@ -117,10 +93,8 @@ class Database:
        assert not (write and not self.is_mutable)
        if write:
            qs = ""
        if self.mode is not None:
            qs = f"?mode={self.mode}"
        conn = sqlite3.connect(
            f"file:{self.path}{qs}", uri=True, check_same_thread=False, **extra_kwargs
            f"file:{self.path}{qs}", uri=True, check_same_thread=False
        )
        self._all_file_connections.append(conn)
        return conn

@@ -132,7 +106,8 @@ class Database:

    async def execute_write(self, sql, params=None, block=True):
        def _inner(conn):
            return conn.execute(sql, params or [])
            with conn:
                return conn.execute(sql, params or [])

        with trace("sql", database=self.name, sql=sql.strip(), params=params):
            results = await self.execute_write_fn(_inner, block=block)

@@ -140,12 +115,11 @@ class Database:

    async def execute_write_script(self, sql, block=True):
        def _inner(conn):
            return conn.executescript(sql)
            with conn:
                return conn.executescript(sql)

        with trace("sql", database=self.name, sql=sql.strip(), executescript=True):
            results = await self.execute_write_fn(
                _inner, block=block, transaction=False
            )
            results = await self.execute_write_fn(_inner, block=block)
        return results

    async def execute_write_many(self, sql, params_seq, block=True):

@@ -158,7 +132,8 @@ class Database:
                count += 1
                yield param

            return conn.executemany(sql, count_params(params_seq)), count
            with conn:
                return conn.executemany(sql, count_params(params_seq)), count

        with trace(
            "sql", database=self.name, sql=sql.strip(), executemany=True
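The memory_name branch of connect() above relies on SQLite shared-cache URIs: every connection to the same file:name?mode=memory&cache=shared URI sees one shared in-memory database. Demonstrated directly with sqlite3 (the database name is illustrative):

```python
import sqlite3

uri = "file:demo?mode=memory&cache=shared"  # illustrative database name
a = sqlite3.connect(uri, uri=True, check_same_thread=False)
b = sqlite3.connect(uri, uri=True, check_same_thread=False)

a.execute("create table t (x)")
a.execute("insert into t values (1)")
a.commit()

# The second connection sees data written by the first; the database
# lives as long as at least one connection stays open.
print(b.execute("select x from t").fetchall())  # [(1,)]
```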
@@ -167,60 +142,25 @@ class Database:
            kwargs["count"] = count
        return results

    async def execute_isolated_fn(self, fn):
        # Open a new connection just for the duration of this function
        # blocking the write queue to avoid any writes occurring during it
        if self.ds.executor is None:
            # non-threaded mode
            isolated_connection = self.connect(write=True)
            try:
                result = fn(isolated_connection)
            finally:
                isolated_connection.close()
                try:
                    self._all_file_connections.remove(isolated_connection)
                except ValueError:
                    # Was probably a memory connection
                    pass
            return result
        else:
            # Threaded mode - send to write thread
            return await self._send_to_write_thread(fn, isolated_connection=True)

    async def execute_write_fn(self, fn, block=True, transaction=True):
    async def execute_write_fn(self, fn, block=True):
        if self.ds.executor is None:
            # non-threaded mode
            if self._write_connection is None:
                self._write_connection = self.connect(write=True)
                self.ds._prepare_connection(self._write_connection, self.name)
            if transaction:
                with self._write_connection:
                    return fn(self._write_connection)
            else:
                return fn(self._write_connection)
        else:
            return await self._send_to_write_thread(
                fn, block=block, transaction=transaction
            )
            return fn(self._write_connection)

    async def _send_to_write_thread(
        self, fn, block=True, isolated_connection=False, transaction=True
    ):
        # threaded mode
        task_id = uuid.uuid5(uuid.NAMESPACE_DNS, "datasette.io")
        if self._write_queue is None:
            self._write_queue = queue.Queue()
        if self._write_thread is None:
            self._write_thread = threading.Thread(
                target=self._execute_writes, daemon=True
            )
            self._write_thread.name = "_execute_writes for database {}".format(
                self.name
            )
            self._write_thread.start()
        task_id = uuid.uuid5(uuid.NAMESPACE_DNS, "datasette.io")
        reply_queue = janus.Queue()
        self._write_queue.put(
            WriteTask(fn, task_id, reply_queue, isolated_connection, transaction)
        )
        self._write_queue.put(WriteTask(fn, task_id, reply_queue))
        if block:
            result = await reply_queue.async_q.get()
            if isinstance(result, Exception):
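The new transaction parameter on execute_write_fn() above controls whether the function runs inside an implicit `with conn:` transaction; execute_write_script() opts out because executescript() manages transactions itself. A sketch of when a caller would do the same, assuming `db` is a Database instance with the new signature:

```python
# VACUUM cannot run inside an open transaction, so opt out of the
# implicit one before sending it through the write machinery.
async def vacuum_database(db):
    def _vacuum(conn):
        conn.execute("VACUUM")

    await db.execute_write_fn(_vacuum, transaction=False)
```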
@@ -245,32 +185,12 @@ class Database:
if conn_exception is not None:
result = conn_exception
else:
if task.isolated_connection:
isolated_connection = self.connect(write=True)
try:
result = task.fn(isolated_connection)
except Exception as e:
sys.stderr.write("{}\n".format(e))
sys.stderr.flush()
result = e
finally:
isolated_connection.close()
try:
self._all_file_connections.remove(isolated_connection)
except ValueError:
# Was probably a memory connection
pass
else:
try:
if task.transaction:
with conn:
result = task.fn(conn)
else:
result = task.fn(conn)
except Exception as e:
sys.stderr.write("{}\n".format(e))
sys.stderr.flush()
result = e
try:
result = task.fn(conn)
except Exception as e:
sys.stderr.write("{}\n".format(e))
sys.stderr.flush()
result = e
task.reply_queue.sync_q.put(result)

async def execute_fn(self, fn):
@@ -283,11 +203,11 @@ class Database:

# threaded mode
def in_thread():
conn = getattr(connections, self._thread_local_id, None)
conn = getattr(connections, self.name, None)
if not conn:
conn = self.connect()
self.ds._prepare_connection(conn, self.name)
setattr(connections, self._thread_local_id, conn)
setattr(connections, self.name, conn)
return fn(conn)

return await asyncio.get_event_loop().run_in_executor(
@@ -385,7 +305,7 @@ class Database:
try:
table_count = (
await self.execute(
f"select count(*) from (select * from [{table}] limit {self.count_limit + 1})",
f"select count(*) from [{table}]",
custom_time_limit=limit,
)
).rows[0][0]
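The replacement count query above wraps the table in a limit subquery, so counting a huge table stops after count_limit + 1 rows instead of scanning everything; a result of count_limit + 1 signals "more than count_limit rows". A quick self-contained illustration of the trick:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("create table t (id integer)")
    conn.executemany("insert into t values (?)", [(i,) for i in range(5000)])
    count_limit = 1000
    (n,) = conn.execute(
        f"select count(*) from (select * from t limit {count_limit + 1})"
    ).fetchone()
    print(n)  # 1001 -> caller knows the true count exceeds count_limit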
@@ -410,12 +330,7 @@ class Database:
# But SQLite prior to 3.16.0 doesn't support pragma functions
results = await self.execute("PRAGMA database_list;")
# {'seq': 0, 'name': 'main', 'file': ''}
return [
AttachedDatabase(*row)
for row in results.rows
# Filter out the SQLite internal "temp" database, refs #2557
if row["seq"] > 0 and row["name"] != "temp"
]
return [AttachedDatabase(*row) for row in results.rows if row["seq"] > 0]

async def table_exists(self, table):
results = await self.execute(
@@ -448,38 +363,12 @@ class Database:
return await self.execute_fn(lambda conn: detect_fts(conn, table))

async def label_column_for_table(self, table):
explicit_label_column = (await self.ds.table_config(self.name, table)).get(
explicit_label_column = self.ds.table_metadata(self.name, table).get(
"label_column"
)
if explicit_label_column:
return explicit_label_column

def column_details(conn):
# Returns {column_name: (type, is_unique)}
db = sqlite_utils.Database(conn)
columns = db[table].columns_dict
indexes = db[table].indexes
details = {}
for name in columns:
is_unique = any(
index
for index in indexes
if index.columns == [name] and index.unique
)
details[name] = (columns[name], is_unique)
return details

column_details = await self.execute_fn(column_details)
# Is there just one unique column that's text?
unique_text_columns = [
name
for name, (type_, is_unique) in column_details.items()
if is_unique and type_ is str
]
if len(unique_text_columns) == 1:
return unique_text_columns[0]

column_names = list(column_details.keys())
column_names = await self.execute_fn(lambda conn: table_columns(conn, table))
# Is there a name or title column?
name_or_title = [c for c in column_names if c.lower() in ("name", "title")]
if name_or_title:

@@ -489,7 +378,6 @@ class Database:
column_names
and len(column_names) == 2
and ("id" in column_names or "pk" in column_names)
and not set(column_names) == {"id", "pk"}
):
return [c for c in column_names if c not in ("id", "pk")][0]
# Couldn't find a label:
@@ -501,107 +389,21 @@ class Database:
)

async def hidden_table_names(self):
hidden_tables = []
# Add any tables marked as hidden in config
db_config = self.ds.config.get("databases", {}).get(self.name, {})
if "tables" in db_config:
hidden_tables += [
t for t in db_config["tables"] if db_config["tables"][t].get("hidden")
]

if sqlite_version()[1] >= 37:
hidden_tables += [
x[0]
for x in await self.execute(
"""
with shadow_tables as (
select name
from pragma_table_list
where [type] = 'shadow'
order by name
),
core_tables as (
select name
from sqlite_master
WHERE name in ('sqlite_stat1', 'sqlite_stat2', 'sqlite_stat3', 'sqlite_stat4')
OR substr(name, 1, 1) == '_'
),
combined as (
select name from shadow_tables
union all
select name from core_tables
)
select name from combined order by 1
# Mark tables 'hidden' if they relate to FTS virtual tables
hidden_tables = [
r[0]
for r in (
await self.execute(
"""
select name from sqlite_master
where rootpage = 0
and (
sql like '%VIRTUAL TABLE%USING FTS%'
) or name in ('sqlite_stat1', 'sqlite_stat2', 'sqlite_stat3', 'sqlite_stat4')
"""
)
]
else:
hidden_tables += [
x[0]
for x in await self.execute(
"""
WITH base AS (
SELECT name
FROM sqlite_master
WHERE name IN ('sqlite_stat1', 'sqlite_stat2', 'sqlite_stat3', 'sqlite_stat4')
OR substr(name, 1, 1) == '_'
),
fts_suffixes AS (
SELECT column1 AS suffix
FROM (VALUES ('_data'), ('_idx'), ('_docsize'), ('_content'), ('_config'))
),
fts5_names AS (
SELECT name
FROM sqlite_master
WHERE sql LIKE '%VIRTUAL TABLE%USING FTS%'
),
fts5_shadow_tables AS (
SELECT
printf('%s%s', fts5_names.name, fts_suffixes.suffix) AS name
FROM fts5_names
JOIN fts_suffixes
),
fts3_suffixes AS (
SELECT column1 AS suffix
FROM (VALUES ('_content'), ('_segdir'), ('_segments'), ('_stat'), ('_docsize'))
),
fts3_names AS (
SELECT name
FROM sqlite_master
WHERE sql LIKE '%VIRTUAL TABLE%USING FTS3%'
OR sql LIKE '%VIRTUAL TABLE%USING FTS4%'
),
fts3_shadow_tables AS (
SELECT
printf('%s%s', fts3_names.name, fts3_suffixes.suffix) AS name
FROM fts3_names
JOIN fts3_suffixes
),
final AS (
SELECT name FROM base
UNION ALL
SELECT name FROM fts5_shadow_tables
UNION ALL
SELECT name FROM fts3_shadow_tables
)
SELECT name FROM final ORDER BY 1
"""
)
]
# Also hide any FTS tables that have a content= argument
hidden_tables += [
x[0]
for x in await self.execute(
"""
SELECT name
FROM sqlite_master
WHERE sql LIKE '%VIRTUAL TABLE%'
AND sql LIKE '%USING FTS%'
AND sql LIKE '%content=%'
"""
)
).rows
]

has_spatialite = await self.execute_fn(detect_spatialite)
if has_spatialite:
# Also hide Spatialite internal tables
@@ -630,6 +432,21 @@ class Database:
)
).rows
]
# Add any from metadata.json
db_metadata = self.ds.metadata(database=self.name)
if "tables" in db_metadata:
hidden_tables += [
t
for t in db_metadata["tables"]
if db_metadata["tables"][t].get("hidden")
]
# Also mark as hidden any tables which start with the name of a hidden table
# e.g. "searchable_fts" implies "searchable_fts_content" should be hidden
for table_name in await self.table_names():
for hidden_table in hidden_tables[:]:
if table_name.startswith(hidden_table):
hidden_tables.append(table_name)
continue

return hidden_tables

@@ -681,14 +498,12 @@ class Database:

class WriteTask:
__slots__ = ("fn", "task_id", "reply_queue", "isolated_connection", "transaction")
__slots__ = ("fn", "task_id", "reply_queue")

def __init__(self, fn, task_id, reply_queue, isolated_connection, transaction):
def __init__(self, fn, task_id, reply_queue):
self.fn = fn
self.task_id = task_id
self.reply_queue = reply_queue
self.isolated_connection = isolated_connection
self.transaction = transaction

class QueryInterrupted(Exception):

@@ -697,9 +512,6 @@ class QueryInterrupted(Exception):
self.sql = sql
self.params = params

def __str__(self):
return "QueryInterrupted: {}".format(self.e)

class MultipleValues(Exception):
pass

@@ -727,9 +539,6 @@ class Results:
else:
raise MultipleValues

def dicts(self):
return [dict(row) for row in self.rows]

def __iter__(self):
return iter(self.rows)
@@ -1,101 +0,0 @@
from datasette import hookimpl
from datasette.permissions import Action
from datasette.resources import (
DatabaseResource,
TableResource,
QueryResource,
)


@hookimpl
def register_actions():
"""Register the core Datasette actions."""
return (
# Global actions (no resource_class)
Action(
name="view-instance",
abbr="vi",
description="View Datasette instance",
),
Action(
name="permissions-debug",
abbr="pd",
description="Access permission debug tool",
),
Action(
name="debug-menu",
abbr="dm",
description="View debug menu items",
),
# Database-level actions (parent-level)
Action(
name="view-database",
abbr="vd",
description="View database",
resource_class=DatabaseResource,
),
Action(
name="view-database-download",
abbr="vdd",
description="Download database file",
resource_class=DatabaseResource,
also_requires="view-database",
),
Action(
name="execute-sql",
abbr="es",
description="Execute read-only SQL queries",
resource_class=DatabaseResource,
also_requires="view-database",
),
Action(
name="create-table",
abbr="ct",
description="Create tables",
resource_class=DatabaseResource,
),
# Table-level actions (child-level)
Action(
name="view-table",
abbr="vt",
description="View table",
resource_class=TableResource,
),
Action(
name="insert-row",
abbr="ir",
description="Insert rows",
resource_class=TableResource,
),
Action(
name="delete-row",
abbr="dr",
description="Delete rows",
resource_class=TableResource,
),
Action(
name="update-row",
abbr="ur",
description="Update rows",
resource_class=TableResource,
),
Action(
name="alter-table",
abbr="at",
description="Alter tables",
resource_class=TableResource,
),
Action(
name="drop-table",
abbr="dt",
description="Drop tables",
resource_class=TableResource,
),
# Query-level actions (child-level)
Action(
name="view-query",
abbr="vq",
description="View named query results",
resource_class=QueryResource,
),
)
@@ -24,12 +24,9 @@ def now(key, request):
if key == "epoch":
return int(time.time())
elif key == "date_utc":
return datetime.datetime.now(datetime.timezone.utc).date().isoformat()
return datetime.datetime.utcnow().date().isoformat()
elif key == "datetime_utc":
return (
datetime.datetime.now(datetime.timezone.utc).strftime(r"%Y-%m-%dT%H:%M:%S")
+ "Z"
)
return datetime.datetime.utcnow().strftime(r"%Y-%m-%dT%H:%M:%S") + "Z"
else:
raise KeyError
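This hunk swaps the deprecated naive datetime.datetime.utcnow() for the timezone-aware datetime.datetime.now(datetime.timezone.utc); both spellings format to identical strings, which a quick sketch confirms:

    import datetime

    aware = datetime.datetime.now(datetime.timezone.utc)
    print(aware.date().isoformat())                    # e.g. 2024-01-01
    print(aware.strftime(r"%Y-%m-%dT%H:%M:%S") + "Z")  # e.g. 2024-01-01T12:00:00Z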
@@ -4,7 +4,7 @@ from datasette import hookimpl
@hookimpl
def menu_links(datasette, actor):
async def inner():
if not await datasette.allowed(action="debug-menu", actor=actor):
if not await datasette.permission_allowed(actor, "debug-menu"):
return []

return [

@@ -17,6 +17,10 @@ def menu_links(datasette, actor):
"href": datasette.urls.path("/-/versions"),
"label": "Version info",
},
{
"href": datasette.urls.path("/-/metadata"),
"label": "Metadata",
},
{
"href": datasette.urls.path("/-/settings"),
"label": "Settings",
218
datasette/default_permissions.py
Normal file

@@ -0,0 +1,218 @@
from datasette import hookimpl
from datasette.utils import actor_matches_allow
import click
import itsdangerous
import json
import time


@hookimpl(tryfirst=True, specname="permission_allowed")
def permission_allowed_default(datasette, actor, action, resource):
async def inner():
# id=root gets some special permissions:
if action in (
"permissions-debug",
"debug-menu",
"insert-row",
"create-table",
"drop-table",
"delete-row",
"update-row",
):
if actor and actor.get("id") == "root":
return True

# Resolve metadata view permissions
if action in (
"view-instance",
"view-database",
"view-table",
"view-query",
"execute-sql",
):
result = await _resolve_metadata_view_permissions(
datasette, actor, action, resource
)
if result is not None:
return result

# Check custom permissions: blocks
return await _resolve_metadata_permissions_blocks(
datasette, actor, action, resource
)

return inner


async def _resolve_metadata_permissions_blocks(datasette, actor, action, resource):
# Check custom permissions: blocks - not yet implemented
return None


async def _resolve_metadata_view_permissions(datasette, actor, action, resource):
if action == "view-instance":
allow = datasette.metadata("allow")
if allow is not None:
return actor_matches_allow(actor, allow)
elif action == "view-database":
if resource == "_internal" and (actor is None or actor.get("id") != "root"):
return False
database_allow = datasette.metadata("allow", database=resource)
if database_allow is None:
return None
return actor_matches_allow(actor, database_allow)
elif action == "view-table":
database, table = resource
tables = datasette.metadata("tables", database=database) or {}
table_allow = (tables.get(table) or {}).get("allow")
if table_allow is None:
return None
return actor_matches_allow(actor, table_allow)
elif action == "view-query":
# Check if this query has a "allow" block in metadata
database, query_name = resource
query = await datasette.get_canned_query(database, query_name, actor)
assert query is not None
allow = query.get("allow")
if allow is None:
return None
return actor_matches_allow(actor, allow)
elif action == "execute-sql":
# Use allow_sql block from database block, or from top-level
database_allow_sql = datasette.metadata("allow_sql", database=resource)
if database_allow_sql is None:
database_allow_sql = datasette.metadata("allow_sql")
if database_allow_sql is None:
return None
return actor_matches_allow(actor, database_allow_sql)
@hookimpl(specname="permission_allowed")
def permission_allowed_actor_restrictions(actor, action, resource):
if actor is None:
return None
if "_r" not in actor:
# No restrictions, so we have no opinion
return None
_r = actor.get("_r")
action_initials = "".join([word[0] for word in action.split("-")])
# If _r is defined then we use those to further restrict the actor
# Crucially, we only use this to say NO (return False) - we never
# use it to return YES (True) because that might over-ride other
# restrictions placed on this actor
all_allowed = _r.get("a")
if all_allowed is not None:
assert isinstance(all_allowed, list)
if action_initials in all_allowed:
return None
# How about for the current database?
if action in ("view-database", "view-database-download", "execute-sql"):
database_allowed = _r.get("d", {}).get(resource)
if database_allowed is not None:
assert isinstance(database_allowed, list)
if action_initials in database_allowed:
return None
# Or the current table? That's any time the resource is (database, table)
if resource is not None and not isinstance(resource, str) and len(resource) == 2:
database, table = resource
table_allowed = _r.get("t", {}).get(database, {}).get(table)
# TODO: What should this do for canned queries?
if table_allowed is not None:
assert isinstance(table_allowed, list)
if action_initials in table_allowed:
return None
# This action is not specifically allowed, so reject it
return False
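A worked example of the initials check above, with an illustrative restricted actor: each action name is reduced to its initials before being looked up in the _r allowlists, and anything not allowlisted is rejected.

    # Sketch of the check performed by the hook above; the helper is
    # illustrative, the actor shape comes from the code itself.
    def action_initials(action):
        return "".join(word[0] for word in action.split("-"))

    actor = {"id": "bot", "_r": {"d": {"fixtures": ["vd", "es"]}}}
    assert action_initials("view-database") == "vd"
    assert "vd" in actor["_r"]["d"]["fixtures"]      # allowlisted -> hook returns None
    assert action_initials("view-table") == "vt"
    assert "vt" not in actor["_r"]["d"]["fixtures"]  # not allowlisted -> hook returns False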
@hookimpl
def actor_from_request(datasette, request):
prefix = "dstok_"
if not datasette.setting("allow_signed_tokens"):
return None
max_signed_tokens_ttl = datasette.setting("max_signed_tokens_ttl")
authorization = request.headers.get("authorization")
if not authorization:
return None
if not authorization.startswith("Bearer "):
return None
token = authorization[len("Bearer ") :]
if not token.startswith(prefix):
return None
token = token[len(prefix) :]
try:
decoded = datasette.unsign(token, namespace="token")
except itsdangerous.BadSignature:
return None
if "t" not in decoded:
# Missing timestamp
return None
created = decoded["t"]
if not isinstance(created, int):
# Invalid timestamp
return None
duration = decoded.get("d")
if duration is not None and not isinstance(duration, int):
# Invalid duration
return None
if (duration is None and max_signed_tokens_ttl) or (
duration is not None
and max_signed_tokens_ttl
and duration > max_signed_tokens_ttl
):
duration = max_signed_tokens_ttl
if duration:
if time.time() - created > duration:
# Expired
return None
actor = {"id": decoded["a"], "token": "dstok"}
if "_r" in decoded:
actor["_r"] = decoded["_r"]
if duration:
actor["token_expires"] = created + duration
return actor
@hookimpl
def register_commands(cli):
from datasette.app import Datasette

@cli.command()
@click.argument("id")
@click.option(
"--secret",
help="Secret used for signing the API tokens",
envvar="DATASETTE_SECRET",
required=True,
)
@click.option(
"-e",
"--expires-after",
help="Token should expire after this many seconds",
type=int,
)
@click.option(
"--debug",
help="Show decoded token",
is_flag=True,
)
def create_token(id, secret, expires_after, debug):
"Create a signed API token for the specified actor ID"
ds = Datasette(secret=secret)
bits = {"a": id, "token": "dstok", "t": int(time.time())}
if expires_after:
bits["d"] = expires_after
token = ds.sign(bits, namespace="token")
click.echo("dstok_{}".format(token))
if debug:
click.echo("\nDecoded:\n")
click.echo(json.dumps(ds.unsign(token, namespace="token"), indent=2))


@hookimpl
def skip_csrf(scope):
# Skip CSRF check for requests with content-type: application/json
if scope["type"] == "http":
headers = scope.get("headers") or {}
if dict(headers).get(b"content-type") == b"application/json":
return True
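The create-token command registered above can then be invoked like this (the secret and printed token are placeholders, not real values):

    DATASETTE_SECRET=your-secret datasette create-token root --expires-after 3600
    dstok_...signed-token-printed-to-stdout...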
@@ -1,59 +0,0 @@
"""
Default permission implementations for Datasette.

This module provides the built-in permission checking logic through implementations
of the permission_resources_sql hook. The hooks are organized by their purpose:

1. Actor Restrictions - Enforces _r allowlists embedded in actor tokens
2. Root User - Grants full access when --root flag is used
3. Config Rules - Applies permissions from datasette.yaml
4. Default Settings - Enforces default_allow_sql and default view permissions

IMPORTANT: These hooks return PermissionSQL objects that are combined using SQL
UNION/INTERSECT operations. The order of evaluation is:
- restriction_sql fields are INTERSECTed (all must match)
- Regular sql fields are UNIONed and evaluated with cascading priority
"""

from __future__ import annotations

from typing import TYPE_CHECKING, Optional

if TYPE_CHECKING:
from datasette.app import Datasette

from datasette import hookimpl

# Re-export all hooks and public utilities
from .restrictions import (
actor_restrictions_sql,
restrictions_allow_action,
ActorRestrictions,
)
from .root import root_user_permissions_sql
from .config import config_permissions_sql
from .defaults import (
default_allow_sql_check,
default_action_permissions_sql,
DEFAULT_ALLOW_ACTIONS,
)
from .tokens import actor_from_signed_api_token


@hookimpl
def skip_csrf(scope) -> Optional[bool]:
"""Skip CSRF check for JSON content-type requests."""
if scope["type"] == "http":
headers = scope.get("headers") or {}
if dict(headers).get(b"content-type") == b"application/json":
return True
return None


@hookimpl
def canned_queries(datasette: "Datasette", database: str, actor) -> dict:
"""Return canned queries defined in datasette.yaml configuration."""
queries = (
((datasette.config or {}).get("databases") or {}).get(database) or {}
).get("queries") or {}
return queries
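To make the UNION/INTERSECT note in the docstring above concrete, here is an illustrative-only sketch (not the exact query Datasette builds) of allow rows from two hooks being UNIONed, then filtered by an INTERSECTed restriction allowlist:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    rows = conn.execute("""
        WITH rules(parent, child, allow) AS (
            SELECT 'db1', NULL, 1        -- hook A: allow db1
            UNION ALL
            SELECT 'db2', 'tbl', 1       -- hook B: allow db2/tbl
        ),
        restrictions(parent, child) AS (
            SELECT 'db1', NULL           -- the combined restriction allowlist
        )
        SELECT rules.parent, rules.child, rules.allow
        FROM rules
        JOIN restrictions
          ON rules.parent = restrictions.parent
         AND (restrictions.child IS NULL OR rules.child = restrictions.child)
    """).fetchall()
    print(rows)  # only the db1 rule survives the restriction filter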
@@ -1,442 +0,0 @@
"""
Config-based permission handling for Datasette.

Applies permission rules from datasette.yaml configuration.
"""

from __future__ import annotations

from typing import TYPE_CHECKING, Any, List, Optional, Set, Tuple

if TYPE_CHECKING:
from datasette.app import Datasette

from datasette import hookimpl
from datasette.permissions import PermissionSQL
from datasette.utils import actor_matches_allow

from .helpers import PermissionRowCollector, get_action_name_variants


class ConfigPermissionProcessor:
"""
Processes permission rules from datasette.yaml configuration.

Configuration structure:

permissions: # Root-level permissions block
view-instance:
id: admin

databases:
mydb:
permissions: # Database-level permissions
view-database:
id: admin
allow: # Database-level allow block (for view-*)
id: viewer
allow_sql: # execute-sql allow block
id: analyst
tables:
users:
permissions: # Table-level permissions
view-table:
id: admin
allow: # Table-level allow block
id: viewer
queries:
my_query:
permissions: # Query-level permissions
view-query:
id: admin
allow: # Query-level allow block
id: viewer
"""

def __init__(
self,
datasette: "Datasette",
actor: Optional[dict],
action: str,
):
self.datasette = datasette
self.actor = actor
self.action = action
self.config = datasette.config or {}
self.collector = PermissionRowCollector(prefix="cfg")

# Pre-compute action variants
self.action_checks = get_action_name_variants(datasette, action)
self.action_obj = datasette.actions.get(action)

# Parse restrictions if present
self.has_restrictions = actor and "_r" in actor if actor else False
self.restrictions = actor.get("_r", {}) if actor else {}

# Pre-compute restriction info for efficiency
self.restricted_databases: Set[str] = set()
self.restricted_tables: Set[Tuple[str, str]] = set()

if self.has_restrictions:
self.restricted_databases = {
db_name
for db_name, db_actions in (self.restrictions.get("d") or {}).items()
if self.action_checks.intersection(db_actions)
}
self.restricted_tables = {
(db_name, table_name)
for db_name, tables in (self.restrictions.get("r") or {}).items()
for table_name, table_actions in tables.items()
if self.action_checks.intersection(table_actions)
}
# Tables implicitly reference their parent databases
self.restricted_databases.update(db for db, _ in self.restricted_tables)

def evaluate_allow_block(self, allow_block: Any) -> Optional[bool]:
"""Evaluate an allow block against the current actor."""
if allow_block is None:
return None
return actor_matches_allow(self.actor, allow_block)

def is_in_restriction_allowlist(
self,
parent: Optional[str],
child: Optional[str],
) -> bool:
"""Check if resource is allowed by actor restrictions."""
if not self.has_restrictions:
return True  # No restrictions, all resources allowed

# Check global allowlist
if self.action_checks.intersection(self.restrictions.get("a", [])):
return True

# Check database-level allowlist
if parent and self.action_checks.intersection(
self.restrictions.get("d", {}).get(parent, [])
):
return True

# Check table-level allowlist
if parent:
table_restrictions = (self.restrictions.get("r", {}) or {}).get(parent, {})
if child:
table_actions = table_restrictions.get(child, [])
if self.action_checks.intersection(table_actions):
return True
else:
# Parent query should proceed if any child in this database is allowlisted
for table_actions in table_restrictions.values():
if self.action_checks.intersection(table_actions):
return True

# Parent/child both None: include if any restrictions exist for this action
if parent is None and child is None:
if self.action_checks.intersection(self.restrictions.get("a", [])):
return True
if self.restricted_databases:
return True
if self.restricted_tables:
return True

return False

def add_permissions_rule(
self,
parent: Optional[str],
child: Optional[str],
permissions_block: Optional[dict],
scope_desc: str,
) -> None:
"""Add a rule from a permissions:{action} block."""
if permissions_block is None:
return

action_allow_block = permissions_block.get(self.action)
result = self.evaluate_allow_block(action_allow_block)

self.collector.add(
parent=parent,
child=child,
allow=result,
reason=f"config {'allow' if result else 'deny'} {scope_desc}",
if_not_none=True,
)

def add_allow_block_rule(
self,
parent: Optional[str],
child: Optional[str],
allow_block: Any,
scope_desc: str,
) -> None:
"""
Add rules from an allow:{} block.

For allow blocks, if the block exists but doesn't match the actor,
this is treated as a deny. We also handle the restriction-gate logic.
"""
if allow_block is None:
return

# Skip if resource is not in restriction allowlist
if not self.is_in_restriction_allowlist(parent, child):
return

result = self.evaluate_allow_block(allow_block)
bool_result = bool(result)

self.collector.add(
parent,
child,
bool_result,
f"config {'allow' if result else 'deny'} {scope_desc}",
)

# Handle restriction-gate: add explicit denies for restricted resources
self._add_restriction_gate_denies(parent, child, bool_result, scope_desc)

def _add_restriction_gate_denies(
self,
parent: Optional[str],
child: Optional[str],
is_allowed: bool,
scope_desc: str,
) -> None:
"""
When a config rule denies at a higher level, add explicit denies
for restricted resources to prevent child-level allows from
incorrectly granting access.
"""
if is_allowed or child is not None or not self.has_restrictions:
return

if not self.action_obj:
return

reason = f"config deny {scope_desc} (restriction gate)"

if parent is None:
# Root-level deny: add denies for all restricted resources
if self.action_obj.takes_parent:
for db_name in self.restricted_databases:
self.collector.add(db_name, None, False, reason)
if self.action_obj.takes_child:
for db_name, table_name in self.restricted_tables:
self.collector.add(db_name, table_name, False, reason)
else:
# Database-level deny: add denies for tables in that database
if self.action_obj.takes_child:
for db_name, table_name in self.restricted_tables:
if db_name == parent:
self.collector.add(db_name, table_name, False, reason)

def process(self) -> Optional[PermissionSQL]:
"""Process all config rules and return combined PermissionSQL."""
self._process_root_permissions()
self._process_databases()
self._process_root_allow_blocks()

return self.collector.to_permission_sql()

def _process_root_permissions(self) -> None:
"""Process root-level permissions block."""
root_perms = self.config.get("permissions") or {}
self.add_permissions_rule(
None,
None,
root_perms,
f"permissions for {self.action}",
)

def _process_databases(self) -> None:
"""Process database-level and nested configurations."""
databases = self.config.get("databases") or {}

for db_name, db_config in databases.items():
self._process_database(db_name, db_config or {})

def _process_database(self, db_name: str, db_config: dict) -> None:
"""Process a single database's configuration."""
# Database-level permissions block
db_perms = db_config.get("permissions") or {}
self.add_permissions_rule(
db_name,
None,
db_perms,
f"permissions for {self.action} on {db_name}",
)

# Process tables
for table_name, table_config in (db_config.get("tables") or {}).items():
self._process_table(db_name, table_name, table_config or {})

# Process queries
for query_name, query_config in (db_config.get("queries") or {}).items():
self._process_query(db_name, query_name, query_config)

# Database-level allow blocks
self._process_database_allow_blocks(db_name, db_config)

def _process_table(
self,
db_name: str,
table_name: str,
table_config: dict,
) -> None:
"""Process a single table's configuration."""
# Table-level permissions block
table_perms = table_config.get("permissions") or {}
self.add_permissions_rule(
db_name,
table_name,
table_perms,
f"permissions for {self.action} on {db_name}/{table_name}",
)

# Table-level allow block (for view-table)
if self.action == "view-table":
self.add_allow_block_rule(
db_name,
table_name,
table_config.get("allow"),
f"allow for {self.action} on {db_name}/{table_name}",
)

def _process_query(
self,
db_name: str,
query_name: str,
query_config: Any,
) -> None:
"""Process a single query's configuration."""
# Query config can be a string (just SQL) or dict
if not isinstance(query_config, dict):
return

# Query-level permissions block
query_perms = query_config.get("permissions") or {}
self.add_permissions_rule(
db_name,
query_name,
query_perms,
f"permissions for {self.action} on {db_name}/{query_name}",
)

# Query-level allow block (for view-query)
if self.action == "view-query":
self.add_allow_block_rule(
db_name,
query_name,
query_config.get("allow"),
f"allow for {self.action} on {db_name}/{query_name}",
)

def _process_database_allow_blocks(
self,
db_name: str,
db_config: dict,
) -> None:
"""Process database-level allow/allow_sql blocks."""
# view-database allow block
if self.action == "view-database":
self.add_allow_block_rule(
db_name,
None,
db_config.get("allow"),
f"allow for {self.action} on {db_name}",
)

# execute-sql allow_sql block
if self.action == "execute-sql":
self.add_allow_block_rule(
db_name,
None,
db_config.get("allow_sql"),
f"allow_sql for {db_name}",
)

# view-table uses database-level allow for inheritance
if self.action == "view-table":
self.add_allow_block_rule(
db_name,
None,
db_config.get("allow"),
f"allow for {self.action} on {db_name}",
)

# view-query uses database-level allow for inheritance
if self.action == "view-query":
self.add_allow_block_rule(
db_name,
None,
db_config.get("allow"),
f"allow for {self.action} on {db_name}",
)

def _process_root_allow_blocks(self) -> None:
"""Process root-level allow/allow_sql blocks."""
root_allow = self.config.get("allow")

if self.action == "view-instance":
self.add_allow_block_rule(
None,
None,
root_allow,
"allow for view-instance",
)

if self.action == "view-database":
self.add_allow_block_rule(
None,
None,
root_allow,
"allow for view-database",
)

if self.action == "view-table":
self.add_allow_block_rule(
None,
None,
root_allow,
"allow for view-table",
)

if self.action == "view-query":
self.add_allow_block_rule(
None,
None,
root_allow,
"allow for view-query",
)

if self.action == "execute-sql":
self.add_allow_block_rule(
None,
None,
self.config.get("allow_sql"),
"allow_sql",
)


@hookimpl(specname="permission_resources_sql")
async def config_permissions_sql(
datasette: "Datasette",
actor: Optional[dict],
action: str,
) -> Optional[List[PermissionSQL]]:
"""
Apply permission rules from datasette.yaml configuration.

This processes:
- permissions: blocks at root, database, table, and query levels
- allow: blocks for view-* actions
- allow_sql: blocks for execute-sql action
"""
processor = ConfigPermissionProcessor(datasette, actor, action)
result = processor.process()

if result is None:
return []

return [result]
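The allow blocks shown in the docstring above are evaluated with datasette.utils.actor_matches_allow, which this module imports; a sketch of its common cases:

    from datasette.utils import actor_matches_allow

    # An allow block matches when the actor carries the given key/value...
    assert actor_matches_allow({"id": "admin"}, {"id": "admin"})
    # ...does not match other actors...
    assert not actor_matches_allow({"id": "viewer"}, {"id": "admin"})
    # ...and list values match any one of their entries
    assert actor_matches_allow({"id": "x"}, {"id": ["x", "y"]})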
@@ -1,70 +0,0 @@
"""
Default permission settings for Datasette.

Provides default allow rules for standard view/execute actions.
"""

from __future__ import annotations

from typing import TYPE_CHECKING, Optional

if TYPE_CHECKING:
from datasette.app import Datasette

from datasette import hookimpl
from datasette.permissions import PermissionSQL


# Actions that are allowed by default (unless --default-deny is used)
DEFAULT_ALLOW_ACTIONS = frozenset(
{
"view-instance",
"view-database",
"view-database-download",
"view-table",
"view-query",
"execute-sql",
}
)


@hookimpl(specname="permission_resources_sql")
async def default_allow_sql_check(
datasette: "Datasette",
actor: Optional[dict],
action: str,
) -> Optional[PermissionSQL]:
"""
Enforce the default_allow_sql setting.

When default_allow_sql is false (the default), execute-sql is denied
unless explicitly allowed by config or other rules.
"""
if action == "execute-sql":
if not datasette.setting("default_allow_sql"):
return PermissionSQL.deny(reason="default_allow_sql is false")

return None


@hookimpl(specname="permission_resources_sql")
async def default_action_permissions_sql(
datasette: "Datasette",
actor: Optional[dict],
action: str,
) -> Optional[PermissionSQL]:
"""
Provide default allow rules for standard view/execute actions.

These defaults are skipped when datasette is started with --default-deny.
The restriction_sql mechanism (from actor_restrictions_sql) will still
filter these results if the actor has restrictions.
"""
if datasette.default_deny:
return None

if action in DEFAULT_ALLOW_ACTIONS:
reason = f"default allow for {action}".replace("'", "''")
return PermissionSQL.allow(reason=reason)

return None
@@ -1,85 +0,0 @@
"""
Shared helper utilities for default permission implementations.
"""

from __future__ import annotations

from dataclasses import dataclass
from typing import TYPE_CHECKING, List, Optional, Set

if TYPE_CHECKING:
from datasette.app import Datasette

from datasette.permissions import PermissionSQL


def get_action_name_variants(datasette: "Datasette", action: str) -> Set[str]:
"""
Get all name variants for an action (full name and abbreviation).

Example:
get_action_name_variants(ds, "view-table") -> {"view-table", "vt"}
"""
variants = {action}
action_obj = datasette.actions.get(action)
if action_obj and action_obj.abbr:
variants.add(action_obj.abbr)
return variants


def action_in_list(datasette: "Datasette", action: str, action_list: list) -> bool:
"""Check if an action (or its abbreviation) is in a list."""
return bool(get_action_name_variants(datasette, action).intersection(action_list))


@dataclass
class PermissionRow:
"""A single permission rule row."""

parent: Optional[str]
child: Optional[str]
allow: bool
reason: str


class PermissionRowCollector:
"""Collects permission rows and converts them to PermissionSQL."""

def __init__(self, prefix: str = "row"):
self.rows: List[PermissionRow] = []
self.prefix = prefix

def add(
self,
parent: Optional[str],
child: Optional[str],
allow: Optional[bool],
reason: str,
if_not_none: bool = False,
) -> None:
"""Add a permission row. If if_not_none=True, only add if allow is not None."""
if if_not_none and allow is None:
return
self.rows.append(PermissionRow(parent, child, allow, reason))

def to_permission_sql(self) -> Optional[PermissionSQL]:
"""Convert collected rows to a PermissionSQL object."""
if not self.rows:
return None

parts = []
params = {}

for idx, row in enumerate(self.rows):
key = f"{self.prefix}_{idx}"
parts.append(
f"SELECT :{key}_parent AS parent, :{key}_child AS child, "
f":{key}_allow AS allow, :{key}_reason AS reason"
)
params[f"{key}_parent"] = row.parent
params[f"{key}_child"] = row.child
params[f"{key}_allow"] = 1 if row.allow else 0
params[f"{key}_reason"] = row.reason

sql = "\nUNION ALL\n".join(parts)
return PermissionSQL(sql=sql, params=params)
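For two collected rows with prefix="cfg", to_permission_sql() above yields SQL of the following shape; executing it directly shows how the parameters come back as rows (the values here are illustrative):

    import sqlite3

    sql = (
        "SELECT :cfg_0_parent AS parent, :cfg_0_child AS child, "
        ":cfg_0_allow AS allow, :cfg_0_reason AS reason "
        "UNION ALL "
        "SELECT :cfg_1_parent AS parent, :cfg_1_child AS child, "
        ":cfg_1_allow AS allow, :cfg_1_reason AS reason"
    )
    params = {
        "cfg_0_parent": "fixtures", "cfg_0_child": None,
        "cfg_0_allow": 1, "cfg_0_reason": "config allow",
        "cfg_1_parent": "fixtures", "cfg_1_child": "users",
        "cfg_1_allow": 0, "cfg_1_reason": "config deny",
    }
    conn = sqlite3.connect(":memory:")
    print(conn.execute(sql, params).fetchall())
    # [('fixtures', None, 1, 'config allow'), ('fixtures', 'users', 0, 'config deny')]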
@@ -1,195 +0,0 @@
"""
Actor restriction handling for Datasette permissions.

This module handles the _r (restrictions) key in actor dictionaries, which
contains allowlists of resources the actor can access.
"""

from __future__ import annotations

from dataclasses import dataclass
from typing import TYPE_CHECKING, List, Optional, Set, Tuple

if TYPE_CHECKING:
from datasette.app import Datasette

from datasette import hookimpl
from datasette.permissions import PermissionSQL

from .helpers import action_in_list, get_action_name_variants


@dataclass
class ActorRestrictions:
"""Parsed actor restrictions from the _r key."""

global_actions: List[str]  # _r.a - globally allowed actions
database_actions: dict  # _r.d - {db_name: [actions]}
table_actions: dict  # _r.r - {db_name: {table: [actions]}}

@classmethod
def from_actor(cls, actor: Optional[dict]) -> Optional["ActorRestrictions"]:
"""Parse restrictions from actor dict. Returns None if no restrictions."""
if not actor:
return None
assert isinstance(actor, dict), "actor must be a dictionary"

restrictions = actor.get("_r")
if restrictions is None:
return None

return cls(
global_actions=restrictions.get("a", []),
database_actions=restrictions.get("d", {}),
table_actions=restrictions.get("r", {}),
)

def is_action_globally_allowed(self, datasette: "Datasette", action: str) -> bool:
"""Check if action is in the global allowlist."""
return action_in_list(datasette, action, self.global_actions)

def get_allowed_databases(self, datasette: "Datasette", action: str) -> Set[str]:
"""Get database names where this action is allowed."""
allowed = set()
for db_name, db_actions in self.database_actions.items():
if action_in_list(datasette, action, db_actions):
allowed.add(db_name)
return allowed

def get_allowed_tables(
self, datasette: "Datasette", action: str
) -> Set[Tuple[str, str]]:
"""Get (database, table) pairs where this action is allowed."""
allowed = set()
for db_name, tables in self.table_actions.items():
for table_name, table_actions in tables.items():
if action_in_list(datasette, action, table_actions):
allowed.add((db_name, table_name))
return allowed


@hookimpl(specname="permission_resources_sql")
async def actor_restrictions_sql(
datasette: "Datasette",
actor: Optional[dict],
action: str,
) -> Optional[List[PermissionSQL]]:
"""
Handle actor restriction-based permission rules.

When an actor has an "_r" key, it contains an allowlist of resources they
can access. This function returns restriction_sql that filters the final
results to only include resources in that allowlist.

The _r structure:
{
"a": ["vi", "pd"],  # Global actions allowed
"d": {"mydb": ["vt", "es"]},  # Database-level actions
"r": {"mydb": {"users": ["vt"]}}  # Table-level actions
}
"""
if not actor:
return None

restrictions = ActorRestrictions.from_actor(actor)

if restrictions is None:
# No restrictions - all resources allowed
return []

# If globally allowed, no filtering needed
if restrictions.is_action_globally_allowed(datasette, action):
return []

# Build restriction SQL
allowed_dbs = restrictions.get_allowed_databases(datasette, action)
allowed_tables = restrictions.get_allowed_tables(datasette, action)

# If nothing is allowed for this action, return empty-set restriction
if not allowed_dbs and not allowed_tables:
return [
PermissionSQL(
params={"deny": f"actor restrictions: {action} not in allowlist"},
restriction_sql="SELECT NULL AS parent, NULL AS child WHERE 0",
)
]

# Build UNION of allowed resources
selects = []
params = {}
counter = 0

# Database-level entries (parent, NULL) - allows all children
for db_name in allowed_dbs:
key = f"restr_{counter}"
counter += 1
selects.append(f"SELECT :{key}_parent AS parent, NULL AS child")
params[f"{key}_parent"] = db_name

# Table-level entries (parent, child)
for db_name, table_name in allowed_tables:
key = f"restr_{counter}"
counter += 1
selects.append(f"SELECT :{key}_parent AS parent, :{key}_child AS child")
params[f"{key}_parent"] = db_name
params[f"{key}_child"] = table_name

restriction_sql = "\nUNION ALL\n".join(selects)

return [PermissionSQL(params=params, restriction_sql=restriction_sql)]
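For an illustrative restricted actor and the view-table action, the hook above would emit a restriction allowlist of this shape (the parameter names follow the restr_N pattern in the code; the actor itself is a made-up example):

    # actor = {"_r": {"d": {"mydb": ["vt"]}, "r": {"mydb": {"users": ["vt"]}}}}
    restriction_sql = (
        "SELECT :restr_0_parent AS parent, NULL AS child\n"
        "UNION ALL\n"
        "SELECT :restr_1_parent AS parent, :restr_1_child AS child"
    )
    params = {
        "restr_0_parent": "mydb",  # database-level entry allows all children
        "restr_1_parent": "mydb",
        "restr_1_child": "users",  # table-level entry
    }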
def restrictions_allow_action(
datasette: "Datasette",
restrictions: dict,
action: str,
resource: Optional[str | Tuple[str, str]],
) -> bool:
"""
Check if restrictions allow the requested action on the requested resource.

This is a synchronous utility function for use by other code that needs
to quickly check restriction allowlists.

Args:
datasette: The Datasette instance
restrictions: The _r dict from an actor
action: The action name to check
resource: None for global, str for database, (db, table) tuple for table

Returns:
True if allowed, False if denied
"""
# Does this action have an abbreviation?
to_check = get_action_name_variants(datasette, action)

# Check global level (any resource)
all_allowed = restrictions.get("a")
if all_allowed is not None:
assert isinstance(all_allowed, list)
if to_check.intersection(all_allowed):
return True

# Check database level
if resource:
if isinstance(resource, str):
database_name = resource
else:
database_name = resource[0]
database_allowed = restrictions.get("d", {}).get(database_name)
if database_allowed is not None:
assert isinstance(database_allowed, list)
if to_check.intersection(database_allowed):
return True

# Check table/resource level
if resource is not None and not isinstance(resource, str) and len(resource) == 2:
database, table = resource
table_allowed = restrictions.get("r", {}).get(database, {}).get(table)
if table_allowed is not None:
assert isinstance(table_allowed, list)
if to_check.intersection(table_allowed):
return True

# This action is not explicitly allowed, so reject it
return False
@@ -1,29 +0,0 @@
"""
Root user permission handling for Datasette.

Grants full permissions to the root user when --root flag is used.
"""

from __future__ import annotations

from typing import TYPE_CHECKING, Optional

if TYPE_CHECKING:
from datasette.app import Datasette

from datasette import hookimpl
from datasette.permissions import PermissionSQL


@hookimpl(specname="permission_resources_sql")
async def root_user_permissions_sql(
datasette: "Datasette",
actor: Optional[dict],
) -> Optional[PermissionSQL]:
"""
Grant root user full permissions when --root flag is used.
"""
if not datasette.root_enabled:
return None
if actor is not None and actor.get("id") == "root":
return PermissionSQL.allow(reason="root user")
@@ -1,95 +0,0 @@
"""
Token authentication for Datasette.

Handles signed API tokens (dstok_ prefix).
"""

from __future__ import annotations

import time
from typing import TYPE_CHECKING, Optional

if TYPE_CHECKING:
from datasette.app import Datasette

import itsdangerous

from datasette import hookimpl


@hookimpl(specname="actor_from_request")
def actor_from_signed_api_token(datasette: "Datasette", request) -> Optional[dict]:
"""
Authenticate requests using signed API tokens (dstok_ prefix).

Token structure (signed JSON):
{
"a": "actor_id",  # Actor ID
"t": 1234567890,  # Timestamp (Unix epoch)
"d": 3600,  # Optional: Duration in seconds
"_r": {...}  # Optional: Restrictions
}
"""
prefix = "dstok_"

# Check if tokens are enabled
if not datasette.setting("allow_signed_tokens"):
return None

max_signed_tokens_ttl = datasette.setting("max_signed_tokens_ttl")

# Get authorization header
authorization = request.headers.get("authorization")
if not authorization:
return None
if not authorization.startswith("Bearer "):
return None

token = authorization[len("Bearer ") :]
if not token.startswith(prefix):
return None

# Remove prefix and verify signature
token = token[len(prefix) :]
try:
decoded = datasette.unsign(token, namespace="token")
except itsdangerous.BadSignature:
return None

# Validate timestamp
if "t" not in decoded:
return None
created = decoded["t"]
if not isinstance(created, int):
return None

# Handle duration/expiry
duration = decoded.get("d")
if duration is not None and not isinstance(duration, int):
return None

# Apply max TTL if configured
if (duration is None and max_signed_tokens_ttl) or (
duration is not None
and max_signed_tokens_ttl
and duration > max_signed_tokens_ttl
):
duration = max_signed_tokens_ttl

# Check expiry
if duration:
if time.time() - created > duration:
return None

# Build actor dict
actor = {"id": decoded["a"], "token": "dstok"}

# Copy restrictions if present
if "_r" in decoded:
actor["_r"] = decoded["_r"]

# Add expiry timestamp if applicable
if duration:
actor["token_expires"] = created + duration

return actor
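A round-trip sketch for the dstok_ tokens handled above, using the same sign/unsign calls the module relies on (the secret is a placeholder):

    import time
    from datasette.app import Datasette

    ds = Datasette(secret="placeholder-secret")
    bits = {"a": "root", "token": "dstok", "t": int(time.time()), "d": 60}
    token = "dstok_" + ds.sign(bits, namespace="token")

    decoded = ds.unsign(token[len("dstok_"):], namespace="token")
    assert decoded["a"] == "root"
    expired = time.time() - decoded["t"] > decoded["d"]
    print(decoded, "(expired)" if expired else "(valid)")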
@ -1,235 +0,0 @@
|
|||
from abc import ABC, abstractproperty
|
||||
from dataclasses import asdict, dataclass, field
|
||||
from datasette.hookspecs import hookimpl
|
||||
from datetime import datetime, timezone
|
||||
|
||||
|
||||
@dataclass
|
||||
class Event(ABC):
|
||||
@abstractproperty
|
||||
def name(self):
|
||||
pass
|
||||
|
||||
created: datetime = field(
|
||||
init=False, default_factory=lambda: datetime.now(timezone.utc)
|
||||
)
|
||||
actor: dict | None
|
||||
|
||||
def properties(self):
|
||||
properties = asdict(self)
|
||||
properties.pop("actor", None)
|
||||
properties.pop("created", None)
|
||||
return properties
|
||||
|
||||
|
||||
@dataclass
|
||||
class LoginEvent(Event):
|
||||
"""
|
||||
Event name: ``login``
|
||||
|
||||
A user (represented by ``event.actor``) has logged in.
|
||||
"""
|
||||
|
||||
name = "login"
|
||||
|
||||
|
||||
@dataclass
|
||||
class LogoutEvent(Event):
|
||||
"""
|
||||
Event name: ``logout``
|
||||
|
||||
A user (represented by ``event.actor``) has logged out.
|
||||
"""
|
||||
|
||||
name = "logout"
|
||||
|
||||
|
||||
@dataclass
|
class CreateTokenEvent(Event):
    """
    Event name: ``create-token``

    A user created an API token.

    :ivar expires_after: Number of seconds after which this token will expire.
    :type expires_after: int or None
    :ivar restrict_all: Restricted permissions for this token.
    :type restrict_all: list
    :ivar restrict_database: Restricted database permissions for this token.
    :type restrict_database: dict
    :ivar restrict_resource: Restricted resource permissions for this token.
    :type restrict_resource: dict
    """

    name = "create-token"
    expires_after: int | None
    restrict_all: list
    restrict_database: dict
    restrict_resource: dict


@dataclass
class CreateTableEvent(Event):
    """
    Event name: ``create-table``

    A new table has been created in the database.

    :ivar database: The name of the database where the table was created.
    :type database: str
    :ivar table: The name of the table that was created
    :type table: str
    :ivar schema: The SQL schema definition for the new table.
    :type schema: str
    """

    name = "create-table"
    database: str
    table: str
    schema: str


@dataclass
class DropTableEvent(Event):
    """
    Event name: ``drop-table``

    A table has been dropped from the database.

    :ivar database: The name of the database where the table was dropped.
    :type database: str
    :ivar table: The name of the table that was dropped
    :type table: str
    """

    name = "drop-table"
    database: str
    table: str


@dataclass
class AlterTableEvent(Event):
    """
    Event name: ``alter-table``

    A table has been altered.

    :ivar database: The name of the database where the table was altered
    :type database: str
    :ivar table: The name of the table that was altered
    :type table: str
    :ivar before_schema: The table's SQL schema before the alteration
    :type before_schema: str
    :ivar after_schema: The table's SQL schema after the alteration
    :type after_schema: str
    """

    name = "alter-table"
    database: str
    table: str
    before_schema: str
    after_schema: str


@dataclass
class InsertRowsEvent(Event):
    """
    Event name: ``insert-rows``

    Rows were inserted into a table.

    :ivar database: The name of the database where the rows were inserted.
    :type database: str
    :ivar table: The name of the table where the rows were inserted.
    :type table: str
    :ivar num_rows: The number of rows that were requested to be inserted.
    :type num_rows: int
    :ivar ignore: Was ignore set?
    :type ignore: bool
    :ivar replace: Was replace set?
    :type replace: bool
    """

    name = "insert-rows"
    database: str
    table: str
    num_rows: int
    ignore: bool
    replace: bool


@dataclass
class UpsertRowsEvent(Event):
    """
    Event name: ``upsert-rows``

    Rows were upserted into a table.

    :ivar database: The name of the database where the rows were inserted.
    :type database: str
    :ivar table: The name of the table where the rows were inserted.
    :type table: str
    :ivar num_rows: The number of rows that were requested to be inserted.
    :type num_rows: int
    """

    name = "upsert-rows"
    database: str
    table: str
    num_rows: int


@dataclass
class UpdateRowEvent(Event):
    """
    Event name: ``update-row``

    A row was updated in a table.

    :ivar database: The name of the database where the row was updated.
    :type database: str
    :ivar table: The name of the table where the row was updated.
    :type table: str
    :ivar pks: The primary key values of the updated row.
    """

    name = "update-row"
    database: str
    table: str
    pks: list


@dataclass
class DeleteRowEvent(Event):
    """
    Event name: ``delete-row``

    A row was deleted from a table.

    :ivar database: The name of the database where the row was deleted.
    :type database: str
    :ivar table: The name of the table where the row was deleted.
    :type table: str
    :ivar pks: The primary key values of the deleted row.
    """

    name = "delete-row"
    database: str
    table: str
    pks: list


@hookimpl
def register_events():
    return [
        LoginEvent,
        LogoutEvent,
        CreateTableEvent,
        CreateTokenEvent,
        AlterTableEvent,
        DropTableEvent,
        InsertRowsEvent,
        UpsertRowsEvent,
        UpdateRowEvent,
        DeleteRowEvent,
    ]

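Every event above follows the same recipe: subclass Event, set a class-level name, declare the payload as dataclass fields, and return the class from a register_events() implementation. A minimal sketch of a plugin-defined event in that style (PingEvent and its message field are hypothetical, not part of this diff):

    from dataclasses import dataclass
    from datasette import hookimpl
    from datasette.events import Event


    @dataclass
    class PingEvent(Event):
        # Event name, analogous to "create-table" above
        name = "ping"
        # Payload field, included when the event is tracked
        message: str


    @hookimpl
    def register_events():
        return [PingEvent]
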
@@ -11,8 +11,8 @@ from datasette.utils import (
)


def load_facet_configs(request, table_config):
    # Given a request and the configuration for a table, return
def load_facet_configs(request, table_metadata):
    # Given a request and the metadata configuration for a table, return
    # a dictionary of selected facets, their lists of configs and for each
    # config whether it came from the request or the metadata.
    #

@@ -20,21 +20,21 @@ def load_facet_configs(request, table_config):
    # {"source": "metadata", "config": config1},
    # {"source": "request", "config": config2}]}
    facet_configs = {}
    table_config = table_config or {}
    table_facet_configs = table_config.get("facets", [])
    for facet_config in table_facet_configs:
        if isinstance(facet_config, str):
    table_metadata = table_metadata or {}
    metadata_facets = table_metadata.get("facets", [])
    for metadata_config in metadata_facets:
        if isinstance(metadata_config, str):
            type = "column"
            facet_config = {"simple": facet_config}
            metadata_config = {"simple": metadata_config}
        else:
            assert (
                len(facet_config.values()) == 1
                len(metadata_config.values()) == 1
            ), "Metadata config dicts should be {type: config}"
            type, facet_config = list(facet_config.items())[0]
            if isinstance(facet_config, str):
                facet_config = {"simple": facet_config}
            type, metadata_config = list(metadata_config.items())[0]
            if isinstance(metadata_config, str):
                metadata_config = {"simple": metadata_config}
        facet_configs.setdefault(type, []).append(
            {"source": "metadata", "config": facet_config}
            {"source": "metadata", "config": metadata_config}
        )
    qs_pairs = urllib.parse.parse_qs(request.query_string, keep_blank_values=True)
    for key, values in qs_pairs.items():

@@ -45,12 +45,13 @@ def load_facet_configs(request, table_config):
        elif key.startswith("_facet_"):
            type = key[len("_facet_") :]
            for value in values:
                # The value is the facet_config - either JSON or not
                facet_config = (
                    json.loads(value) if value.startswith("{") else {"simple": value}
                )
                # The value is the config - either JSON or not
                if value.startswith("{"):
                    config = json.loads(value)
                else:
                    config = {"simple": value}
                facet_configs.setdefault(type, []).append(
                    {"source": "request", "config": facet_config}
                    {"source": "request", "config": config}
                )
    return facet_configs

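Both sides of the hunks above accept the same two facet configuration shapes and differ only in naming. A sketch of the inputs load_facet_configs() handles, with hypothetical column names:

    # In table configuration or metadata, each facet is either a bare column
    # name or a {type: config} dictionary with exactly one key:
    facets = ["state", {"array": "tags"}, {"date": "created"}]

    # In the query string, each _facet_<type>= value is either a plain string
    # or a JSON object (detected by a leading "{"):
    #   ?_facet_array=tags                 -> {"simple": "tags"} under type "array"
    #   ?_facet_date={"simple": "created"} -> parsed with json.loads()
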
@@ -65,8 +66,6 @@ def register_facet_classes():

class Facet:
    type = None
    # How many rows to consider when suggesting facets:
    suggest_consider = 1000

    def __init__(
        self,

@@ -76,7 +75,7 @@ class Facet:
        sql=None,
        table=None,
        params=None,
        table_config=None,
        metadata=None,
        row_count=None,
    ):
        assert table or sql, "Must provide either table= or sql="

@@ -87,12 +86,12 @@ class Facet:
        self.table = table
        self.sql = sql or f"select * from [{table}]"
        self.params = params or []
        self.table_config = table_config
        self.metadata = metadata
        # row_count can be None, in which case we calculate it ourselves:
        self.row_count = row_count

    def get_configs(self):
        configs = load_facet_configs(self.request, self.table_config)
        configs = load_facet_configs(self.request, self.metadata)
        return configs.get(self.type) or []

    def get_querystring_pairs(self):

@@ -105,15 +104,10 @@ class Facet:
        max_returned_rows = self.ds.setting("max_returned_rows")
        table_facet_size = None
        if self.table:
            config_facet_size = (
                self.ds.config.get("databases", {})
                .get(self.database, {})
                .get("tables", {})
                .get(self.table, {})
                .get("facet_size")
            )
            if config_facet_size:
                table_facet_size = config_facet_size
            tables_metadata = self.ds.metadata("tables", database=self.database) or {}
            table_metadata = tables_metadata.get(self.table) or {}
            if table_metadata:
                table_facet_size = table_metadata.get("facet_size")
        custom_facet_size = self.request.args.get("_facet_size")
        if custom_facet_size:
            if custom_facet_size == "max":

@@ -147,6 +141,17 @@ class Facet:
            )
        ).columns

    async def get_row_count(self):
        if self.row_count is None:
            self.row_count = (
                await self.ds.execute(
                    self.database,
                    f"select count(*) from ({self.sql})",
                    self.params,
                )
            ).rows[0][0]
        return self.row_count


class ColumnFacet(Facet):
    type = "column"

@@ -161,16 +166,13 @@ class ColumnFacet(Facet):
            if column in already_enabled:
                continue
            suggested_facet_sql = """
                with limited as (select * from ({sql}) limit {suggest_consider})
                select {column} as value, count(*) as n from limited
                where value is not null
                select {column} as value, count(*) as n from (
                    {sql}
                ) where value is not null
                group by value
                limit {limit}
            """.format(
                column=escape_sqlite(column),
                sql=self.sql,
                limit=facet_size + 1,
                suggest_consider=self.suggest_consider,
                column=escape_sqlite(column), sql=self.sql, limit=facet_size + 1
            )
            distinct_values = None
            try:

@@ -205,17 +207,6 @@ class ColumnFacet(Facet):
                    continue
        return suggested_facets

    async def get_row_count(self):
        if self.row_count is None:
            self.row_count = (
                await self.ds.execute(
                    self.database,
                    f"select count(*) from (select * from ({self.sql}) limit {self.suggest_consider})",
                    self.params,
                )
            ).rows[0][0]
        return self.row_count

    async def facet_results(self):
        facet_results = []
        facets_timed_out = []

@@ -262,7 +253,7 @@ class ColumnFacet(Facet):
                # Attempt to expand foreign keys into labels
                values = [row["value"] for row in facet_rows]
                expanded = await self.ds.expand_foreign_keys(
                    self.request.actor, self.database, self.table, column, values
                    self.database, self.table, column, values
                )
            else:
                expanded = {}

@@ -318,14 +309,11 @@ class ArrayFacet(Facet):
                continue
            # Is every value in this column either null or a JSON array?
            suggested_facet_sql = """
                with limited as (select * from ({sql}) limit {suggest_consider})
                select distinct json_type({column})
                from limited
                from ({sql})
                where {column} is not null and {column} != ''
            """.format(
                column=escape_sqlite(column),
                sql=self.sql,
                suggest_consider=self.suggest_consider,
                column=escape_sqlite(column), sql=self.sql
            )
            try:
                results = await self.ds.execute(

@@ -410,9 +398,7 @@ class ArrayFacet(Facet):
                    order by
                        count(*) desc, value limit {limit}
                """.format(
                    col=escape_sqlite(column),
                    sql=self.sql,
                    limit=facet_size + 1,
                    col=escape_sqlite(column), sql=self.sql, limit=facet_size + 1
                )
                try:
                    facet_rows_results = await self.ds.execute(

@@ -480,8 +466,8 @@ class DateFacet(Facet):
            # Does this column contain any dates in the first 100 rows?
            suggested_facet_sql = """
                select date({column}) from (
                    select * from ({sql}) limit 100
                ) where {column} glob "????-??-*"
                    {sql}
                ) where {column} glob "????-??-*" limit 100;
            """.format(
                column=escape_sqlite(column), sql=self.sql
            )

@@ -1,8 +1,8 @@
from datasette import hookimpl
from datasette.resources import DatabaseResource
from datasette.views.base import DatasetteError
from datasette.utils.asgi import BadRequest
import json
import numbers
from .utils import detect_json1, escape_sqlite, path_with_removed_args


@@ -13,10 +13,11 @@ def where_filters(request, database, datasette):
    where_clauses = []
    extra_wheres_for_ui = []
    if "_where" in request.args:
        if not await datasette.allowed(
            action="execute-sql",
            resource=DatabaseResource(database=database),
            actor=request.actor,
        if not await datasette.permission_allowed(
            request.actor,
            "execute-sql",
            resource=database,
            default=True,
        ):
            raise DatasetteError("_where= is not allowed", status=403)
        else:

@@ -49,7 +50,7 @@ def search_filters(request, database, table, datasette):
    extra_context = {}

    # Figure out which fts_table to use
    table_metadata = await datasette.table_config(database, table)
    table_metadata = datasette.table_metadata(database, table)
    db = datasette.get_database(database)
    fts_table = request.args.get("_fts_table")
    fts_table = fts_table or table_metadata.get("fts_table")

@@ -79,9 +80,9 @@ def search_filters(request, database, table, datasette):
            "{fts_pk} in (select rowid from {fts_table} where {fts_table} match {match_clause})".format(
                fts_table=escape_sqlite(fts_table),
                fts_pk=escape_sqlite(fts_pk),
                match_clause=(
                    ":search" if search_mode_raw else "escape_fts(:search)"
                ),
                match_clause=":search"
                if search_mode_raw
                else "escape_fts(:search)",
            )
        )
        human_descriptions.append(f'search matches "{search}"')

@@ -98,11 +99,9 @@ def search_filters(request, database, table, datasette):
                "rowid in (select rowid from {fts_table} where {search_col} match {match_clause})".format(
                    fts_table=escape_sqlite(fts_table),
                    search_col=escape_sqlite(search_col),
                    match_clause=(
                        ":search_{}".format(i)
                        if search_mode_raw
                        else "escape_fts(:search_{})".format(i)
                    ),
                    match_clause=":search_{}".format(i)
                    if search_mode_raw
                    else "escape_fts(:search_{})".format(i),
                )
            )
            human_descriptions.append(

@@ -280,13 +279,6 @@ class Filters:
            '{c} contains "{v}"',
            format="%{}%",
        ),
        TemplatedFilter(
            "notcontains",
            "does not contain",
            '"{c}" not like :{p}',
            '{c} does not contain "{v}"',
            format="%{}%",
        ),
        TemplatedFilter(
            "endswith",
            "ends with",

@@ -367,8 +359,12 @@ class Filters:
    )
    _filters_by_key = {f.key: f for f in _filters}

    def __init__(self, pairs):
    def __init__(self, pairs, units=None, ureg=None):
        if units is None:
            units = {}
        self.pairs = pairs
        self.units = units
        self.ureg = ureg

    def lookups(self):
        """Yields (lookup, display, no_argument) pairs"""

@@ -408,6 +404,20 @@ class Filters:
    def has_selections(self):
        return bool(self.pairs)

    def convert_unit(self, column, value):
        """If the user has provided a unit in the query, convert it into the column unit, if present."""
        if column not in self.units:
            return value

        # Try to interpret the value as a unit
        value = self.ureg(value)
        if isinstance(value, numbers.Number):
            # It's just a bare number, assume it's the column unit
            return value

        column_unit = self.ureg(self.units[column])
        return value.to(column_unit).magnitude

    def build_where_clauses(self, table):
        sql_bits = []
        params = {}

@@ -415,7 +425,9 @@ class Filters:
        for column, lookup, value in self.selections():
            filter = self._filters_by_key.get(lookup, None)
            if filter:
                sql_bit, param = filter.where_clause(table, column, value, i)
                sql_bit, param = filter.where_clause(
                    table, column, self.convert_unit(column, value), i
                )
                sql_bits.append(sql_bit)
                if param is not None:
                    if not isinstance(param, list):

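The convert_unit() method added above expects a pint-style unit registry passed in as ureg. A rough usage sketch, assuming the pint library and a hypothetical mapping of the distance column to metres:

    import pint  # assumed dependency supplying the unit registry

    ureg = pint.UnitRegistry()
    filters = Filters(
        pairs=[("distance__lt", "3km")],
        units={"distance": "m"},  # hypothetical column unit
        ureg=ureg,
    )
    # "3km" is parsed as a quantity and converted to the column unit:
    print(filters.convert_unit("distance", "3km"))  # 3000.0
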
@@ -1,3 +1,4 @@
from os import stat
from datasette import hookimpl, Response


|
@ -1,16 +1,14 @@
|
|||
from datasette import hookimpl, Response
|
||||
from .utils import add_cors_headers
|
||||
from .utils import await_me_maybe, add_cors_headers
|
||||
from .utils.asgi import (
|
||||
Base400,
|
||||
Forbidden,
|
||||
)
|
||||
from .views.base import DatasetteError
|
||||
from markupsafe import Markup
|
||||
import pdb
|
||||
import traceback
|
||||
|
||||
try:
|
||||
import ipdb as pdb
|
||||
except ImportError:
|
||||
import pdb
|
||||
from .plugins import pm
|
||||
|
||||
try:
|
||||
import rich
|
||||
|
|
@ -59,8 +57,7 @@ def handle_exception(datasette, request, exception):
|
|||
if request.path.split("?")[0].endswith(".json"):
|
||||
return Response.json(info, status=status, headers=headers)
|
||||
else:
|
||||
environment = datasette.get_jinja_environment(request)
|
||||
template = environment.select_template(templates)
|
||||
template = datasette.jinja_env.select_template(templates)
|
||||
return Response.html(
|
||||
await template.render_async(
|
||||
dict(
|
||||
|
|
|
|||
|
|
@@ -10,6 +10,11 @@ def startup(datasette):
    """Fires directly after Datasette first starts running"""


@hookspec
def get_metadata(datasette, key, database, table):
    """Return metadata to be merged into Datasette's metadata dictionary"""


@hookspec
def asgi_wrapper(datasette):
    """Returns an ASGI middleware callable to wrap our ASGI application with"""

@@ -55,7 +60,7 @@ def publish_subcommand(publish):


@hookspec
def render_cell(row, value, column, table, database, datasette, request):
def render_cell(row, value, column, table, database, datasette):
    """Customize rendering of HTML table cell values"""


@@ -69,11 +74,6 @@ def register_facet_classes():
    """Register Facet subclasses"""


@hookspec
def register_actions(datasette):
    """Register actions: returns a list of datasette.permission.Action objects"""


@hookspec
def register_routes(datasette):
    """Register URL routes: return a list of (regex, view_function) pairs"""

@@ -89,16 +89,6 @@ def actor_from_request(datasette, request):
    """Return an actor dictionary based on the incoming request"""


@hookspec(firstresult=True)
def actors_from_ids(datasette, actor_ids):
    """Returns a dictionary mapping those IDs to actor dictionaries"""


@hookspec
def jinja2_environment_from_request(datasette, request, env):
    """Return a Jinja2 environment based on the incoming request"""


@hookspec
def filters_from_request(request, database, table, datasette):
    """

@@ -111,15 +101,8 @@ def filters_from_request(request, database, table, datasette):


@hookspec
def permission_resources_sql(datasette, actor, action):
    """Return SQL query fragments for permission checks on resources.

    Returns None, a PermissionSQL object, or a list of PermissionSQL objects.
    Each PermissionSQL contains SQL that should return rows with columns:
    parent (str|None), child (str|None), allow (int), reason (str).

    Used to efficiently check permissions across multiple resources at once.
    """
def permission_allowed(datasette, actor, action, resource):
    """Check if actor is allowed to perform this action - return True, False or None"""


@hookspec
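The second hookspec in that hunk is the single-check form: each plugin is asked about one actor/action/resource at a time and answers True, False, or None to defer to other plugins. A minimal implementation in that documented style (the root-only rule is illustrative):

    from datasette import hookimpl


    @hookimpl
    def permission_allowed(actor, action):
        # Allow root to view the permissions debug page; stay silent otherwise
        if action == "permissions-debug":
            return actor is not None and actor.get("id") == "root"
        return None
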
@@ -142,36 +125,16 @@ def menu_links(datasette, actor, request):
    """Links for the navigation menu"""


@hookspec
def row_actions(datasette, actor, request, database, table, row):
    """Links for the row actions menu"""


@hookspec
def table_actions(datasette, actor, database, table, request):
    """Links for the table actions menu"""


@hookspec
def view_actions(datasette, actor, database, view, request):
    """Links for the view actions menu"""


@hookspec
def query_actions(datasette, actor, database, query_name, request, sql, params):
    """Links for the query and canned query actions menu"""


@hookspec
def database_actions(datasette, actor, database, request):
    """Links for the database actions menu"""


@hookspec
def homepage_actions(datasette, actor, request):
    """Links for the homepage actions menu"""


@hookspec
def skip_csrf(datasette, scope):
    """Mechanism for skipping CSRF checks for certain requests"""

@@ -180,43 +143,3 @@ def skip_csrf(datasette, scope):
@hookspec
def handle_exception(datasette, request, exception):
    """Handle an uncaught exception. Can return a Response or None."""


@hookspec
def track_event(datasette, event):
    """Respond to an event tracked by Datasette"""


@hookspec
def register_events(datasette):
    """Return a list of Event subclasses to use with track_event()"""


@hookspec
def top_homepage(datasette, request):
    """HTML to include at the top of the homepage"""


@hookspec
def top_database(datasette, request, database):
    """HTML to include at the top of the database page"""


@hookspec
def top_table(datasette, request, database, table):
    """HTML to include at the top of the table page"""


@hookspec
def top_row(datasette, request, database, table, row):
    """HTML to include at the top of the row page"""


@hookspec
def top_query(datasette, request, database, sql):
    """HTML to include at the top of the query results page"""


@hookspec
def top_canned_query(datasette, request, database, query_name):
    """HTML to include at the top of the canned query page"""

@@ -1,210 +1,19 @@
from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import Any, NamedTuple
import contextvars
import collections


# Context variable to track when permission checks should be skipped
_skip_permission_checks = contextvars.ContextVar(
    "skip_permission_checks", default=False
Permission = collections.namedtuple(
    "Permission", ("name", "abbr", "takes_database", "takes_table", "default")
)


class SkipPermissions:
    """Context manager to temporarily skip permission checks.

    This is not a stable API and may change in future releases.

    Usage:
        with SkipPermissions():
            # Permission checks are skipped within this block
            response = await datasette.client.get("/protected")
    """

    def __enter__(self):
        self.token = _skip_permission_checks.set(True)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        _skip_permission_checks.reset(self.token)
        return False


class Resource(ABC):
    """
    Base class for all resource types.

    Each subclass represents a type of resource (e.g., TableResource, DatabaseResource).
    The class itself carries metadata about the resource type.
    Instances represent specific resources.
    """

    # Class-level metadata (subclasses must define these)
    name: str = None  # e.g., "table", "database", "model"
    parent_class: type["Resource"] | None = None  # e.g., DatabaseResource for tables

    # Instance-level optional extra attributes
    reasons: list[str] | None = None
    include_reasons: bool | None = None

    def __init__(self, parent: str | None = None, child: str | None = None):
        """
        Create a resource instance.

        Args:
            parent: The parent identifier (meaning depends on resource type)
            child: The child identifier (meaning depends on resource type)
        """
        self.parent = parent
        self.child = child
        self._private = None  # Sentinel to track if private was set

    @property
    def private(self) -> bool:
        """
        Whether this resource is private (accessible to actor but not anonymous).

        This property is only available on Resource objects returned from
        allowed_resources() when include_is_private=True is used.

        Raises:
            AttributeError: If accessed without calling include_is_private=True
        """
        if self._private is None:
            raise AttributeError(
                "The 'private' attribute is only available when using "
                "allowed_resources(..., include_is_private=True)"
            )
        return self._private

    @private.setter
    def private(self, value: bool):
        self._private = value

    @classmethod
    def __init_subclass__(cls):
        """
        Validate resource hierarchy doesn't exceed 2 levels.

        Raises:
            ValueError: If this resource would create a 3-level hierarchy
        """
        super().__init_subclass__()

        if cls.parent_class is None:
            return  # Top of hierarchy, nothing to validate

        # Check if our parent has a parent - that would create 3 levels
        if cls.parent_class.parent_class is not None:
            # We have a parent, and that parent has a parent
            # This creates a 3-level hierarchy, which is not allowed
            raise ValueError(
                f"Resource {cls.__name__} creates a 3-level hierarchy: "
                f"{cls.parent_class.parent_class.__name__} -> {cls.parent_class.__name__} -> {cls.__name__}. "
                f"Maximum 2 levels allowed (parent -> child)."
            )

    @classmethod
    @abstractmethod
    def resources_sql(cls) -> str:
        """
        Return SQL query that returns all resources of this type.

        Must return two columns: parent, child
        """
        pass


class AllowedResource(NamedTuple):
    """A resource with the reason it was allowed (for debugging)."""

    resource: Resource
    reason: str


@dataclass(frozen=True, kw_only=True)
class Action:
    name: str
    description: str | None
    abbr: str | None = None
    resource_class: type[Resource] | None = None
    also_requires: str | None = None  # Optional action name that must also be allowed

    @property
    def takes_parent(self) -> bool:
        """
        Whether this action requires a parent identifier when instantiating its resource.

        Returns False for global-only actions (no resource_class).
        Returns True for all actions with a resource_class (all resources require a parent identifier).
        """
        return self.resource_class is not None

    @property
    def takes_child(self) -> bool:
        """
        Whether this action requires a child identifier when instantiating its resource.

        Returns False for global actions (no resource_class).
        Returns False for parent-level resources (DatabaseResource - parent_class is None).
        Returns True for child-level resources (TableResource, QueryResource - have a parent_class).
        """
        if self.resource_class is None:
            return False
        return self.resource_class.parent_class is not None


_reason_id = 1


@dataclass
class PermissionSQL:
    """
    A plugin contributes SQL that yields:
        parent TEXT NULL,
        child TEXT NULL,
        allow INTEGER,  -- 1 allow, 0 deny
        reason TEXT

    For restriction-only plugins, sql can be None and only restriction_sql is provided.
    """

    sql: str | None = (
        None  # SQL that SELECTs the 4 columns above (can be None for restriction-only)
    )
    params: dict[str, Any] | None = (
        None  # bound params for the SQL (values only; no ':' prefix)
    )
    source: str | None = None  # System will set this to the plugin name
    restriction_sql: str | None = (
        None  # Optional SQL that returns (parent, child) for restriction filtering
    )

    @classmethod
    def allow(cls, reason: str, _allow: bool = True) -> "PermissionSQL":
        global _reason_id
        i = _reason_id
        _reason_id += 1
        return cls(
            sql=f"SELECT NULL AS parent, NULL AS child, {1 if _allow else 0} AS allow, :reason_{i} AS reason",
            params={f"reason_{i}": reason},
        )

    @classmethod
    def deny(cls, reason: str) -> "PermissionSQL":
        return cls.allow(reason=reason, _allow=False)


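A sketch of how a plugin could use the PermissionSQL helpers above from the permission_resources_sql hook in this branch, denying one hypothetical table; the column aliases follow the contract spelled out in the docstring:

    from datasette import hookimpl
    from datasette.permissions import PermissionSQL


    @hookimpl
    def permission_resources_sql(actor, action):
        if action != "view-table":
            return None
        # One row: parent, child, allow (0 = deny), reason
        return PermissionSQL(
            sql="SELECT :db AS parent, :tbl AS child, 0 AS allow, :why AS reason",
            params={"db": "fixtures", "tbl": "secrets", "why": "secrets is hidden"},
        )
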
# This is obsolete, replaced by Action and ResourceType
@dataclass
class Permission:
    name: str
    abbr: str | None
    description: str | None
    takes_database: bool
    takes_resource: bool
    default: bool
    # This is deliberately undocumented: it's considered an internal
    # implementation detail for view-table/view-database and should
    # not be used by plugins as it may change in the future.
    implies_can_view: bool = False
PERMISSIONS = (
    Permission("view-instance", "vi", False, False, True),
    Permission("view-database", "vd", True, False, True),
    Permission("view-database-download", "vdd", True, False, True),
    Permission("view-table", "vt", True, True, True),
    Permission("view-query", "vq", True, True, True),
    Permission("insert-row", "ir", True, True, False),
    Permission("delete-row", "dr", True, True, False),
    Permission("drop-table", "dt", True, True, False),
    Permission("execute-sql", "es", True, False, True),
    Permission("permissions-debug", "pd", False, False, False),
    Permission("debug-menu", "dm", False, False, False),
)

@@ -1,20 +1,9 @@
import importlib
import os
import pluggy
from pprint import pprint
import pkg_resources
import sys
from . import hookspecs

if sys.version_info >= (3, 9):
    import importlib.resources as importlib_resources
else:
    import importlib_resources
if sys.version_info >= (3, 10):
    import importlib.metadata as importlib_metadata
else:
    import importlib_metadata


DEFAULT_PLUGINS = (
    "datasette.publish.heroku",
    "datasette.publish.cloudrun",

@@ -23,65 +12,20 @@ DEFAULT_PLUGINS = (
    "datasette.sql_functions",
    "datasette.actor_auth_cookie",
    "datasette.default_permissions",
    "datasette.default_actions",
    "datasette.default_magic_parameters",
    "datasette.blob_renderer",
    "datasette.default_menu_links",
    "datasette.handle_exception",
    "datasette.forbidden",
    "datasette.events",
)

pm = pluggy.PluginManager("datasette")
pm.add_hookspecs(hookspecs)

DATASETTE_TRACE_PLUGINS = os.environ.get("DATASETTE_TRACE_PLUGINS", None)


def before(hook_name, hook_impls, kwargs):
    print(file=sys.stderr)
    print(f"{hook_name}:", file=sys.stderr)
    pprint(kwargs, width=40, indent=4, stream=sys.stderr)
    print("Hook implementations:", file=sys.stderr)
    pprint(hook_impls, width=40, indent=4, stream=sys.stderr)


def after(outcome, hook_name, hook_impls, kwargs):
    results = outcome.get_result()
    if not isinstance(results, list):
        results = [results]
    print("Results:", file=sys.stderr)
    pprint(results, width=40, indent=4, stream=sys.stderr)


if DATASETTE_TRACE_PLUGINS:
    pm.add_hookcall_monitoring(before, after)


DATASETTE_LOAD_PLUGINS = os.environ.get("DATASETTE_LOAD_PLUGINS", None)

if not hasattr(sys, "_called_from_test") and DATASETTE_LOAD_PLUGINS is None:
if not hasattr(sys, "_called_from_test"):
    # Only load plugins if not running tests
    pm.load_setuptools_entrypoints("datasette")

# Load any plugins specified in DATASETTE_LOAD_PLUGINS")
if DATASETTE_LOAD_PLUGINS is not None:
    for package_name in [
        name for name in DATASETTE_LOAD_PLUGINS.split(",") if name.strip()
    ]:
        try:
            distribution = importlib_metadata.distribution(package_name)
            entry_points = distribution.entry_points
            for entry_point in entry_points:
                if entry_point.group == "datasette":
                    mod = entry_point.load()
                    pm.register(mod, name=entry_point.name)
                    # Ensure name can be found in plugin_to_distinfo later:
                    pm._plugin_distinfo.append((mod, distribution))
        except importlib_metadata.PackageNotFoundError:
            sys.stderr.write("Plugin {} could not be found\n".format(package_name))

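One subtlety in the loop above: setting DATASETTE_LOAD_PLUGINS to an empty string disables entry-point loading entirely, because the comma-split filter yields no names. A quick illustration of that parsing:

    for value in ("", "datasette-cluster-map", "one, ,two"):
        names = [name for name in value.split(",") if name.strip()]
        print(repr(value), "->", names)
    # ''                      -> []
    # 'datasette-cluster-map' -> ['datasette-cluster-map']
    # 'one, ,two'             -> ['one', 'two']
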
# Load default plugins
for plugin in DEFAULT_PLUGINS:
    mod = importlib.import_module(plugin)

@@ -94,24 +38,21 @@ def get_plugins():
    for plugin in pm.get_plugins():
        static_path = None
        templates_path = None
        plugin_name = (
            plugin.__name__
            if hasattr(plugin, "__name__")
            else plugin.__class__.__name__
        )
        if plugin_name not in DEFAULT_PLUGINS:
        if plugin.__name__ not in DEFAULT_PLUGINS:
            try:
                if (importlib_resources.files(plugin_name) / "static").is_dir():
                    static_path = str(importlib_resources.files(plugin_name) / "static")
                if (importlib_resources.files(plugin_name) / "templates").is_dir():
                    templates_path = str(
                        importlib_resources.files(plugin_name) / "templates"
                if pkg_resources.resource_isdir(plugin.__name__, "static"):
                    static_path = pkg_resources.resource_filename(
                        plugin.__name__, "static"
                    )
            except (TypeError, ModuleNotFoundError):
                # Caused by --plugins_dir= plugins
                if pkg_resources.resource_isdir(plugin.__name__, "templates"):
                    templates_path = pkg_resources.resource_filename(
                        plugin.__name__, "templates"
                    )
            except (KeyError, ImportError):
                # Caused by --plugins_dir= plugins - KeyError/ImportError thrown in Py3.5
                pass
        plugin_info = {
            "name": plugin_name,
            "name": plugin.__name__,
            "static_path": static_path,
            "templates_path": templates_path,
            "hooks": [h.name for h in pm.get_hookcallers(plugin)],

@@ -119,6 +60,6 @@ def get_plugins():
        distinfo = plugin_to_distinfo.get(plugin)
        if distinfo:
            plugin_info["version"] = distinfo.version
            plugin_info["name"] = distinfo.name or distinfo.project_name
            plugin_info["name"] = distinfo.project_name
        plugins.append(plugin_info)
    return plugins

@@ -3,7 +3,7 @@ import click
import json
import os
import re
from subprocess import CalledProcessError, check_call, check_output
from subprocess import check_call, check_output

from .common import (
    add_common_publish_arguments_and_options,

@@ -23,9 +23,7 @@ def publish_subcommand(publish):
        help="Application name to use when building",
    )
    @click.option(
        "--service",
        default="",
        help="Cloud Run service to deploy (or over-write)",
        "--service", default="", help="Cloud Run service to deploy (or over-write)"
    )
    @click.option("--spatialite", is_flag=True, help="Enable SpatialLite extension")
    @click.option(

@@ -57,32 +55,13 @@ def publish_subcommand(publish):
    @click.option(
        "--max-instances",
        type=int,
        default=1,
        show_default=True,
        help="Maximum Cloud Run instances (use 0 to remove the limit)",
        help="Maximum Cloud Run instances",
    )
    @click.option(
        "--min-instances",
        type=int,
        help="Minimum Cloud Run instances",
    )
    @click.option(
        "--artifact-repository",
        default="datasette",
        show_default=True,
        help="Artifact Registry repository to store the image",
    )
    @click.option(
        "--artifact-region",
        default="us",
        show_default=True,
        help="Artifact Registry location (region or multi-region)",
    )
    @click.option(
        "--artifact-project",
        default=None,
        help="Project ID for Artifact Registry (defaults to the active project)",
    )
    def cloudrun(
        files,
        metadata,

@@ -112,9 +91,6 @@ def publish_subcommand(publish):
        apt_get_extras,
        max_instances,
        min_instances,
        artifact_repository,
        artifact_region,
        artifact_project,
    ):
        "Publish databases to Datasette running on Cloud Run"
        fail_if_publish_binary_not_installed(

@@ -124,21 +100,6 @@ def publish_subcommand(publish):
            "gcloud config get-value project", shell=True, universal_newlines=True
        ).strip()

        artifact_project = artifact_project or project

        # Ensure Artifact Registry exists for the target image
        _ensure_artifact_registry(
            artifact_project=artifact_project,
            artifact_region=artifact_region,
            artifact_repository=artifact_repository,
        )

        artifact_host = (
            artifact_region
            if artifact_region.endswith("-docker.pkg.dev")
            else f"{artifact_region}-docker.pkg.dev"
        )

        if not service:
            # Show the user their current services, then prompt for one
            click.echo("Please provide a service name for this deployment\n")

@@ -156,11 +117,6 @@ def publish_subcommand(publish):
            click.echo("")
            service = click.prompt("Service name", type=str)

        image_id = (
            f"{artifact_host}/{artifact_project}/"
            f"{artifact_repository}/datasette-{service}"
        )

        extra_metadata = {
            "title": title,
            "license": license,

@@ -217,6 +173,7 @@ def publish_subcommand(publish):
                print(fp.read())
            print("\n====================\n")

        image_id = f"gcr.io/{project}/{name}"
        check_call(
            "gcloud builds submit --tag {}{}".format(
                image_id, " --timeout {}".format(timeout) if timeout else ""

@@ -230,7 +187,7 @@ def publish_subcommand(publish):
            ("--max-instances", max_instances),
            ("--min-instances", min_instances),
        ):
            if value is not None:
            if value:
                extra_deploy_options.append("{} {}".format(option, value))
        check_call(
            "gcloud run deploy --allow-unauthenticated --platform=managed --image {} {}{}".format(

@@ -242,52 +199,6 @@ def publish_subcommand(publish):
        )


def _ensure_artifact_registry(artifact_project, artifact_region, artifact_repository):
    """Ensure Artifact Registry API is enabled and the repository exists."""

    enable_cmd = (
        "gcloud services enable artifactregistry.googleapis.com "
        f"--project {artifact_project} --quiet"
    )
    try:
        check_call(enable_cmd, shell=True)
    except CalledProcessError as exc:
        raise click.ClickException(
            "Failed to enable artifactregistry.googleapis.com. "
            "Please ensure you have permissions to manage services."
        ) from exc

    describe_cmd = (
        "gcloud artifacts repositories describe {repo} --project {project} "
        "--location {location} --quiet"
    ).format(
        repo=artifact_repository,
        project=artifact_project,
        location=artifact_region,
    )
    try:
        check_call(describe_cmd, shell=True)
        return
    except CalledProcessError:
        create_cmd = (
            "gcloud artifacts repositories create {repo} --repository-format=docker "
            '--location {location} --project {project} --description "Datasette Cloud Run images" --quiet'
        ).format(
            repo=artifact_repository,
            location=artifact_region,
            project=artifact_project,
        )
        try:
            check_call(create_cmd, shell=True)
            click.echo(f"Created Artifact Registry repository '{artifact_repository}'")
        except CalledProcessError as exc:
            raise click.ClickException(
                "Failed to create Artifact Registry repository. "
                "Use --artifact-repository/--artifact-region to point to an existing repo "
                "or create one manually."
            ) from exc


def get_existing_services():
    services = json.loads(
        check_output(

@@ -303,7 +214,6 @@ def get_existing_services():
            "url": service["status"]["address"]["url"],
        }
        for service in services
        if "url" in service["status"]
    ]

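The substantive change in the Cloud Run hunks above is where the built image gets pushed: an Artifact Registry path on one side, the legacy gcr.io Container Registry path on the other. A sketch of the two naming schemes with hypothetical values:

    service = "mydemo"
    project = "my-gcp-project"

    # Artifact Registry form: host/project/repository/image
    print(f"us-docker.pkg.dev/{project}/datasette/datasette-{service}")
    # us-docker.pkg.dev/my-gcp-project/datasette/datasette-mydemo

    # Legacy Container Registry form: gcr.io/project/name
    name = "datasette-mydemo"  # hypothetical --name value
    print(f"gcr.io/{project}/{name}")
    # gcr.io/my-gcp-project/datasette-mydemo
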
@@ -4,7 +4,6 @@ from datasette.utils import (
    remove_infinites,
    CustomJSONEncoder,
    path_from_row_pks,
    sqlite3,
)
from datasette.utils.asgi import Response


@@ -20,14 +19,14 @@ def convert_specific_columns_to_json(rows, columns, json_cols):
            if column in json_cols:
                try:
                    value = json.loads(value)
                except (TypeError, ValueError):
                except (TypeError, ValueError) as e:
                    pass
            new_row.append(value)
        new_rows.append(new_row)
    return new_rows


def json_renderer(request, args, data, error, truncated=None):
def json_renderer(args, data, view_name):
    """Render a response as JSON"""
    status_code = 200


@@ -45,38 +44,28 @@ def json_renderer(request, args, data, error, truncated=None):
        data["rows"] = [remove_infinites(row) for row in data["rows"]]

    # Deal with the _shape option
    shape = args.get("_shape", "objects")
    shape = args.get("_shape", "arrays")
    # if there's an error, ignore the shape entirely
    data["ok"] = True
    if error:
        shape = "objects"
        status_code = 400
        data["error"] = error
        data["ok"] = False
    if data.get("error"):
        shape = "arrays"

    next_url = data.get("next_url")

    if truncated is not None:
        data["truncated"] = truncated
    if shape == "arrayfirst":
        if not data["rows"]:
            data = []
        elif isinstance(data["rows"][0], sqlite3.Row):
            data = [row[0] for row in data["rows"]]
        else:
            assert isinstance(data["rows"][0], dict)
            data = [next(iter(row.values())) for row in data["rows"]]
        data = [row[0] for row in data["rows"]]
    elif shape in ("objects", "object", "array"):
        columns = data.get("columns")
        rows = data.get("rows")
        if rows and columns and not isinstance(rows[0], dict):
        if rows and columns:
            data["rows"] = [dict(zip(columns, row)) for row in rows]
        if shape == "object":
            shape_error = None
            error = None
            if "primary_keys" not in data:
                shape_error = "_shape=object is only available on tables"
                error = "_shape=object is only available on tables"
            else:
                pks = data["primary_keys"]
                if not pks:
                    shape_error = (
                    error = (
                        "_shape=object not available for tables with no primary keys"
                    )
                else:

@@ -85,18 +74,13 @@ def json_renderer(request, args, data, error, truncated=None):
                    pk_string = path_from_row_pks(row, pks, not pks)
                    object_rows[pk_string] = row
                data = object_rows
            if shape_error:
                data = {"ok": False, "error": shape_error}
            if error:
                data = {"ok": False, "error": error}
        elif shape == "array":
            data = data["rows"]

    elif shape == "arrays":
        if not data["rows"]:
            pass
        elif isinstance(data["rows"][0], sqlite3.Row):
            data["rows"] = [list(row) for row in data["rows"]]
        else:
            data["rows"] = [list(row.values()) for row in data["rows"]]
        pass
    else:
        status_code = 400
        data = {

@@ -105,12 +89,6 @@ def json_renderer(request, args, data, error, truncated=None):
            "status": 400,
            "title": None,
        }

    # Don't include "columns" in output
    # https://github.com/simonw/datasette/issues/2136
    if isinstance(data, dict) and "columns" not in request.args.getlist("_extra"):
        data.pop("columns", None)

    # Handle _nl option for _shape=array
    nl = args.get("_nl", "")
    if nl and shape == "array":

@@ -120,6 +98,8 @@ def json_renderer(request, args, data, error, truncated=None):
    body = json.dumps(data, cls=CustomJSONEncoder)
    content_type = "application/json; charset=utf-8"
    headers = {}
    if next_url:
        headers["link"] = f'<{next_url}>; rel="next"'
    return Response(
        body, status=status_code, headers=headers, content_type=content_type
    )

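For reference, given columns ["id", "name"] and a single row [1, "Alice"], the shapes branched on in the hunks above produce output along these lines (a sketch based on this diff, not exhaustive):

    # _shape=objects    -> {"ok": True, "rows": [{"id": 1, "name": "Alice"}], ...}
    # _shape=arrays     -> {"ok": True, "rows": [[1, "Alice"]], ...}
    # _shape=array      -> [{"id": 1, "name": "Alice"}]
    # _shape=arrayfirst -> [1]
    # _shape=object     -> {"1": {"id": 1, "name": "Alice"}}  (tables with pks only)
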
@@ -1,90 +0,0 @@
"""Core resource types for Datasette's permission system."""

from datasette.permissions import Resource


class DatabaseResource(Resource):
    """A database in Datasette."""

    name = "database"
    parent_class = None  # Top of the resource hierarchy

    def __init__(self, database: str):
        super().__init__(parent=database, child=None)

    @classmethod
    async def resources_sql(cls, datasette) -> str:
        return """
            SELECT database_name AS parent, NULL AS child
            FROM catalog_databases
        """


class TableResource(Resource):
    """A table in a database."""

    name = "table"
    parent_class = DatabaseResource

    def __init__(self, database: str, table: str):
        super().__init__(parent=database, child=table)

    @classmethod
    async def resources_sql(cls, datasette) -> str:
        return """
            SELECT database_name AS parent, table_name AS child
            FROM catalog_tables
            UNION ALL
            SELECT database_name AS parent, view_name AS child
            FROM catalog_views
        """


class QueryResource(Resource):
    """A canned query in a database."""

    name = "query"
    parent_class = DatabaseResource

    def __init__(self, database: str, query: str):
        super().__init__(parent=database, child=query)

    @classmethod
    async def resources_sql(cls, datasette) -> str:
        from datasette.plugins import pm
        from datasette.utils import await_me_maybe

        # Get all databases from catalog
        db = datasette.get_internal_database()
        result = await db.execute("SELECT database_name FROM catalog_databases")
        databases = [row[0] for row in result.rows]

        # Gather all canned queries from all databases
        query_pairs = []
        for database_name in databases:
            # Call the hook to get queries (including from config via default plugin)
            for queries_result in pm.hook.canned_queries(
                datasette=datasette,
                database=database_name,
                actor=None,  # Get ALL queries for resource enumeration
            ):
                queries = await await_me_maybe(queries_result)
                if queries:
                    for query_name in queries.keys():
                        query_pairs.append((database_name, query_name))

        # Build SQL
        if not query_pairs:
            return "SELECT NULL AS parent, NULL AS child WHERE 0"

        # Generate UNION ALL query
        selects = []
        for db_name, query_name in query_pairs:
            # Escape single quotes by doubling them
            db_escaped = db_name.replace("'", "''")
            query_escaped = query_name.replace("'", "''")
            selects.append(
                f"SELECT '{db_escaped}' AS parent, '{query_escaped}' AS child"
            )

        return " UNION ALL ".join(selects)

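These classes stay within the two-level limit that Resource.__init_subclass__ enforces in the permissions module earlier in this diff. A sketch of what a third level would trigger (ColumnResource is hypothetical):

    from datasette.permissions import Resource
    from datasette.resources import TableResource

    try:

        class ColumnResource(Resource):
            name = "column"
            parent_class = TableResource  # TableResource already has a parent

            @classmethod
            def resources_sql(cls) -> str:
                return "SELECT NULL AS parent, NULL AS child WHERE 0"

    except ValueError as exc:
        print(exc)  # "...creates a 3-level hierarchy... Maximum 2 levels allowed"
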
@ -163,22 +163,28 @@ h6,
|
|||
}
|
||||
|
||||
.page-header {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
padding-left: 10px;
|
||||
border-left: 10px solid #666;
|
||||
margin-bottom: 0.75rem;
|
||||
margin-top: 1rem;
|
||||
}
|
||||
.page-header h1 {
|
||||
display: inline;
|
||||
margin: 0;
|
||||
font-size: 2rem;
|
||||
padding-right: 0.2em;
|
||||
}
|
||||
|
||||
.page-action-menu details > summary {
|
||||
.page-header details {
|
||||
display: inline-flex;
|
||||
}
|
||||
.page-header details > summary {
|
||||
list-style: none;
|
||||
display: inline-flex;
|
||||
cursor: pointer;
|
||||
}
|
||||
.page-action-menu details > summary::-webkit-details-marker {
|
||||
.page-header details > summary::-webkit-details-marker {
|
||||
display: none;
|
||||
}
|
||||
|
||||
|
|
@ -222,6 +228,12 @@ button.button-as-link:focus {
|
|||
color: #67C98D;
|
||||
}
|
||||
|
||||
a img {
|
||||
display: block;
|
||||
max-width: 100%;
|
||||
border: 0;
|
||||
}
|
||||
|
||||
code,
|
||||
pre {
|
||||
font-family: monospace;
|
||||
|
|
@ -259,28 +271,24 @@ a.not-underlined {
|
|||
|
||||
/* Page Furniture ========================================================= */
|
||||
/* Header */
|
||||
header.hd,
|
||||
footer.ft {
|
||||
header,
|
||||
footer {
|
||||
padding: 0.6rem 1rem 0.5rem 1rem;
|
||||
background-color: #276890;
|
||||
background: linear-gradient(180deg, rgba(96,144,173,1) 0%, rgba(39,104,144,1) 50%);
|
||||
color: rgba(255,255,244,0.9);
|
||||
overflow: hidden;
|
||||
box-sizing: border-box;
|
||||
min-height: 2.6rem;
|
||||
}
|
||||
footer.ft {
|
||||
margin-top: 1rem;
|
||||
}
|
||||
header.hd p,
|
||||
footer.ft p {
|
||||
header p,
|
||||
footer p {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
header.hd .crumbs {
|
||||
header .crumbs {
|
||||
float: left;
|
||||
}
|
||||
header.hd .actor {
|
||||
header .actor {
|
||||
float: right;
|
||||
text-align: right;
|
||||
padding-left: 1rem;
|
||||
|
|
@ -289,32 +297,32 @@ header.hd .actor {
|
|||
top: -3px;
|
||||
}
|
||||
|
||||
footer.ft a:link,
|
||||
footer.ft a:visited,
|
||||
footer.ft a:hover,
|
||||
footer.ft a:focus,
|
||||
footer.ft a:active,
|
||||
footer.ft button.button-as-link {
|
||||
footer a:link,
|
||||
footer a:visited,
|
||||
footer a:hover,
|
||||
footer a:focus,
|
||||
footer a:active,
|
||||
footer button.button-as-link {
|
||||
color: rgba(255,255,244,0.8);
|
||||
}
|
||||
header.hd a:link,
|
||||
header.hd a:visited,
|
||||
header.hd a:hover,
|
||||
header.hd a:focus,
|
||||
header.hd a:active,
|
||||
header.hd button.button-as-link {
|
||||
header a:link,
|
||||
header a:visited,
|
||||
header a:hover,
|
||||
header a:focus,
|
||||
header a:active,
|
||||
header button.button-as-link {
|
||||
color: rgba(255,255,244,0.8);
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
footer.ft a:hover,
|
||||
footer.ft a:focus,
|
||||
footer.ft a:active,
|
||||
footer.ft .button-as-link:hover,
|
||||
footer.ft .button-as-link:focus,
|
||||
header.hd a:hover,
|
||||
header.hd a:focus,
|
||||
header.hd a:active,
|
||||
footer a:hover,
|
||||
footer a:focus,
|
||||
footer a:active,
|
||||
footer.button-as-link:hover,
|
||||
footer.button-as-link:focus,
|
||||
header a:hover,
|
||||
header a:focus,
|
||||
header a:active,
|
||||
button.button-as-link:hover,
|
||||
button.button-as-link:focus {
|
||||
color: rgba(255,255,244,1);
|
||||
|
|
@ -326,6 +334,11 @@ section.content {
|
|||
margin: 0 1rem;
|
||||
}
|
||||
|
||||
/* Footer */
|
||||
footer {
|
||||
margin-top: 1rem;
|
||||
}
|
||||
|
||||
/* Navigation menu */
|
||||
details.nav-menu > summary {
|
||||
list-style: none;
|
||||
|
|
@ -339,59 +352,25 @@ details.nav-menu > summary::-webkit-details-marker {
|
|||
}
|
||||
details .nav-menu-inner {
|
||||
position: absolute;
|
||||
top: 2.6rem;
|
||||
top: 2rem;
|
||||
right: 10px;
|
||||
width: 180px;
|
||||
background-color: #276890;
|
||||
padding: 1rem;
|
||||
z-index: 1000;
|
||||
padding: 0;
|
||||
}
|
||||
.nav-menu-inner li,
|
||||
form.nav-menu-logout {
|
||||
padding: 0.3rem 0.5rem;
|
||||
border-top: 1px solid #ffffff69;
|
||||
}
|
||||
.nav-menu-inner a {
|
||||
display: block;
|
||||
}
|
||||
|
||||
/* Table/database actions menu */
|
||||
.page-action-menu {
|
||||
.page-header {
|
||||
position: relative;
|
||||
margin-bottom: 0.5em;
|
||||
}
|
||||
.actions-menu-links {
|
||||
display: inline;
|
||||
}
|
||||
.actions-menu-links .dropdown-menu {
|
||||
position: absolute;
|
||||
top: calc(100% + 10px);
|
||||
left: 0;
|
||||
z-index: 10000;
|
||||
}
|
||||
.page-action-menu .icon-text {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
border-radius: .25rem;
|
||||
padding: 5px 12px 3px 7px;
|
||||
color: #fff;
|
||||
font-weight: 400;
|
||||
font-size: 0.8em;
|
||||
background: linear-gradient(180deg, #007bff 0%, #4E79C7 100%);
|
||||
border-color: #007bff;
|
||||
}
|
||||
.page-action-menu .icon-text span {
|
||||
/* Nudge text up a bit */
|
||||
position: relative;
|
||||
top: -2px;
|
||||
}
|
||||
.page-action-menu .icon-text:hover {
|
||||
cursor: pointer;
|
||||
}
|
||||
.page-action-menu .icon {
|
||||
width: 18px;
|
||||
height: 18px;
|
||||
margin-right: 4px;
|
||||
left: -10px;
|
||||
}
|
||||
|
||||
/* Components ============================================================== */
|
||||
|
|
@ -444,30 +423,36 @@ h2 em {
|
|||
.table-wrapper {
|
||||
overflow-x: auto;
|
||||
}
|
||||
table.rows-and-columns {
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
}
|
||||
table.rows-and-columns td {
|
||||
td {
|
||||
border-top: 1px solid #aaa;
|
||||
border-right: 1px solid #eee;
|
||||
padding: 4px;
|
||||
vertical-align: top;
|
||||
white-space: pre-wrap;
|
||||
}
|
||||
table.rows-and-columns td.type-pk {
|
||||
td.type-pk {
|
||||
font-weight: bold;
|
||||
}
|
||||
table.rows-and-columns td em {
|
||||
td em {
|
||||
font-style: normal;
|
||||
font-size: 0.8em;
|
||||
color: #aaa;
|
||||
}
|
||||
table.rows-and-columns th {
|
||||
th {
|
||||
padding-right: 1em;
|
||||
}
|
||||
table.rows-and-columns a:link {
|
||||
table a:link {
|
||||
text-decoration: none;
|
||||
}
|
||||
.rows-and-columns td:before {
|
||||
display: block;
|
||||
color: black;
|
||||
margin-left: -10%;
|
||||
font-size: 0.8em;
|
||||
}
|
||||
.rows-and-columns td ol,
|
||||
.rows-and-columns td ul {
|
||||
list-style: initial;
|
||||
|
|
@ -485,8 +470,10 @@ a.blob-download {
|
|||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
|
||||
/* Forms =================================================================== */
|
||||
|
||||
|
||||
form.sql textarea {
|
||||
border: 1px solid #ccc;
|
||||
width: 70%;
|
||||
|
|
@ -495,30 +482,27 @@ form.sql textarea {
|
|||
font-family: monospace;
|
||||
font-size: 1.3em;
|
||||
}
|
||||
form.sql label {
|
||||
form label {
|
||||
font-weight: bold;
|
||||
display: inline-block;
|
||||
width: 15%;
|
||||
}
|
||||
.advanced-export form label {
|
||||
width: auto;
|
||||
}
|
||||
.advanced-export input[type=submit] {
|
||||
font-size: 0.6em;
|
||||
margin-left: 1em;
|
||||
}
|
||||
label.sort_by_desc {
|
||||
width: auto;
|
||||
padding-right: 1em;
|
||||
}
|
||||
pre#sql-query {
|
||||
margin-bottom: 1em;
|
||||
}
|
||||
|
||||
.core label,
|
||||
label.core {
|
||||
font-weight: bold;
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
.core input[type=text],
|
||||
input.core[type=text],
|
||||
.core input[type=search],
|
||||
input.core[type=search] {
|
||||
form input[type=text],
|
||||
form input[type=search] {
|
||||
border: 1px solid #ccc;
|
||||
border-radius: 3px;
|
||||
width: 60%;
|
||||
|
|
@ -527,27 +511,19 @@ input.core[type=search] {
|
|||
font-size: 1em;
|
||||
font-family: Helvetica, sans-serif;
|
||||
}
|
||||
.core input[type=search],
|
||||
input.core[type=search] {
|
||||
/* Stop Webkit from styling search boxes in an inconsistent way */
|
||||
/* https://css-tricks.com/webkit-html5-search-inputs/ comments */
|
||||
/* Stop Webkit from styling search boxes in an inconsistent way */
|
||||
/* https://css-tricks.com/webkit-html5-search-inputs/ comments */
|
||||
input[type=search] {
|
||||
-webkit-appearance: textfield;
|
||||
}
|
||||
.core input[type="search"]::-webkit-search-decoration,
|
||||
input.core[type="search"]::-webkit-search-decoration,
|
||||
.core input[type="search"]::-webkit-search-cancel-button,
|
||||
input.core[type="search"]::-webkit-search-cancel-button,
|
||||
.core input[type="search"]::-webkit-search-results-button,
|
||||
input.core[type="search"]::-webkit-search-results-button,
|
||||
.core input[type="search"]::-webkit-search-results-decoration,
|
||||
input.core[type="search"]::-webkit-search-results-decoration {
|
||||
input[type="search"]::-webkit-search-decoration,
|
||||
input[type="search"]::-webkit-search-cancel-button,
|
||||
input[type="search"]::-webkit-search-results-button,
|
||||
input[type="search"]::-webkit-search-results-decoration {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.core input[type=submit],
|
||||
.core button[type=button],
|
||||
input.core[type=submit],
|
||||
button.core[type=button] {
|
||||
form input[type=submit], form button[type=button] {
|
||||
font-weight: 400;
|
||||
cursor: pointer;
|
||||
text-align: center;
|
||||
|
|
@ -560,16 +536,14 @@ button.core[type=button] {
|
|||
border-radius: .25rem;
|
||||
}
|
||||
|
||||
.core input[type=submit],
|
||||
input.core[type=submit] {
|
||||
form input[type=submit] {
|
||||
color: #fff;
|
||||
background: linear-gradient(180deg, #007bff 0%, #4E79C7 100%);
|
||||
background-color: #007bff;
|
||||
border-color: #007bff;
|
||||
-webkit-appearance: button;
|
||||
}
|
||||
|
||||
.core button[type=button],
|
||||
button.core[type=button] {
|
||||
form button[type=button] {
|
||||
color: #007bff;
|
||||
background-color: #fff;
|
||||
border-color: #007bff;
|
||||
|
|
@ -599,9 +573,6 @@ button.core[type=button] {
|
|||
display: inline-block;
|
||||
margin-right: 0.3em;
|
||||
}
|
||||
.select-wrapper:focus-within {
|
||||
border: 1px solid black;
|
||||
}
|
||||
.select-wrapper.filter-op {
|
||||
width: 80px;
|
||||
}
|
||||
|
|
@ -759,7 +730,7 @@ p.zero-results {
|
|||
left: -9999px;
|
||||
}
|
||||
|
||||
table.rows-and-columns tr {
|
||||
.rows-and-columns tr {
|
||||
border: 1px solid #ccc;
|
||||
margin-bottom: 1em;
|
||||
border-radius: 10px;
|
||||
|
|
@ -767,7 +738,7 @@ p.zero-results {
|
|||
padding: 0.2rem;
|
||||
}
|
||||
|
||||
table.rows-and-columns td {
|
||||
.rows-and-columns td {
|
||||
/* Behave like a "row" */
|
||||
border: none;
|
||||
border-bottom: 1px solid #eee;
|
||||
|
|
@@ -775,7 +746,7 @@ p.zero-results {
  padding-left: 10%;
}

table.rows-and-columns td:before {
.rows-and-columns td:before {
  display: block;
  color: black;
  margin-left: -10%;

@@ -847,13 +818,6 @@ svg.dropdown-menu-icon {
.dropdown-menu a:hover {
  background-color: #eee;
}
.dropdown-menu .dropdown-description {
  margin: 0;
  color: #666;
  font-size: 0.8em;
  max-width: 80vw;
  white-space: normal;
}
.dropdown-menu .hook {
  display: block;
  position: absolute;

@@ -1,210 +0,0 @@
// Custom events for use with the native CustomEvent API
const DATASETTE_EVENTS = {
  INIT: "datasette_init", // returns datasette manager instance in evt.detail
};

// Datasette "core" -> Methods/APIs that are foundational
// Plugins will have greater stability if they use the functional hooks, but if they do decide to hook into
// literal DOM selectors, they'll have an easier time using these addresses.
const DOM_SELECTORS = {
  /** Should have one match */
  jsonExportLink: ".export-links a[href*=json]",

  /** Event listeners that go outside of the main table, e.g. existing scroll listener */
  tableWrapper: ".table-wrapper",
  table: "table.rows-and-columns",
  aboveTablePanel: ".above-table-panel",

  // These could have multiple matches
  /** Used for selecting table headers. Use makeColumnActions if you want to add menu items. */
  tableHeaders: `table.rows-and-columns th`,

  /** Used to add "where" clauses to query using direct manipulation */
  filterRows: ".filter-row",
  /** Used to show top available enum values for a column ("facets") */
  facetResults: ".facet-results [data-column]",
};
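
// Illustrative sketch (not part of the original file): a plugin that wants to
// hook into the table DOM can reuse these addresses instead of hard-coding its
// own selectors. The class name added here is hypothetical.
const exampleTable = document.querySelector(DOM_SELECTORS.table);
if (exampleTable) {
  exampleTable.classList.add("example-plugin-initialized");
}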

/**
 * Monolith class for interacting with Datasette JS API
 * Imported with DEFER, runs after main document parsed
 * For now, manually synced with datasette/version.py
 */
const datasetteManager = {
  VERSION: window.datasetteVersion,

  // TODO: Should order of registration matter more?

  // Should plugins be allowed to clobber others or is it last-in takes priority?
  // Does pluginMetadata need to be serializable, or can we let it be stateful / have functions?
  plugins: new Map(),

  registerPlugin: (name, pluginMetadata) => {
    if (datasetteManager.plugins.has(name)) {
      console.warn(`Warning -> plugin ${name} was redefined`);
    }
    datasetteManager.plugins.set(name, pluginMetadata);

    // If the plugin participates in the panel... update the panel.
    if (pluginMetadata.makeAboveTablePanelConfigs) {
      datasetteManager.renderAboveTablePanel();
    }
  },

  /**
   * New DOM elements are created on each click, so the data is not stale.
   *
   * Items
   * - must provide label (text)
   * - might provide href (string) or an onclick ((evt) => void)
   *
   * columnMeta is metadata stored on the column header (TH) as a DOMStringMap
   * - column: string
   * - columnNotNull: boolean
   * - columnType: sqlite datatype enum (text, number, etc)
   * - isPk: boolean
   */
  makeColumnActions: (columnMeta) => {
    let columnActions = [];

    // Accept function that returns list of columnActions with keys
    // Required: label (text)
    // Optional: onClick or href
    datasetteManager.plugins.forEach((plugin) => {
      if (plugin.makeColumnActions) {
        // Plugins can provide multiple columnActions if they want
        // If multiple try to create entry with same label, the last one deletes the others
        columnActions.push(...plugin.makeColumnActions(columnMeta));
      }
    });

    // TODO: Validate columnAction configs and give informative error message if missing keys.
    return columnActions;
  },
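
  // Illustrative sketch (not part of the original file): a plugin contributes
  // column actions by registering an object whose makeColumnActions returns
  // configs in the shape consumed above (label plus onClick or href). The
  // plugin name and clipboard behavior here are hypothetical:
  //
  //   document.addEventListener("datasette_init", (evt) => {
  //     evt.detail.registerPlugin("example-column-plugin", {
  //       version: 0.1,
  //       makeColumnActions: (columnMeta) => [
  //         {
  //           label: "Copy column name",
  //           onClick: (clickEvt) => {
  //             clickEvt.preventDefault();
  //             navigator.clipboard.writeText(columnMeta.column);
  //           },
  //         },
  //       ],
  //     });
  //   });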

  /**
   * In MVP, each plugin can only have 1 instance.
   * In future, panels could be repeated. We omit that for now since so many plugins depend on
   * shared URL state, which makes having multiple instances of a plugin at the same time problematic.
   * Currently, we never destroy any panels, we just hide them.
   *
   * TODO: nicer panel css, show panel selection state.
   * TODO: does this hook need to take any arguments?
   */
  renderAboveTablePanel: () => {
    const aboveTablePanel = document.querySelector(
      DOM_SELECTORS.aboveTablePanel,
    );

    if (!aboveTablePanel) {
      console.warn(
        "This page does not have a table, so renderAboveTablePanel cannot be used.",
      );
      return;
    }

    let aboveTablePanelWrapper = aboveTablePanel.querySelector(".panels");

    // First render: create wrappers. Otherwise, reuse previous.
    if (!aboveTablePanelWrapper) {
      aboveTablePanelWrapper = document.createElement("div");
      aboveTablePanelWrapper.classList.add("tab-contents");
      const panelNav = document.createElement("div");
      panelNav.classList.add("tab-controls");

      // Temporary: css for minimal amount of breathing room.
      panelNav.style.display = "flex";
      panelNav.style.gap = "8px";
      panelNav.style.marginTop = "4px";
      panelNav.style.marginBottom = "20px";

      aboveTablePanel.appendChild(panelNav);
      aboveTablePanel.appendChild(aboveTablePanelWrapper);
    }

    datasetteManager.plugins.forEach((plugin, pluginName) => {
      const { makeAboveTablePanelConfigs } = plugin;

      if (makeAboveTablePanelConfigs) {
        const controls = aboveTablePanel.querySelector(".tab-controls");
        const contents = aboveTablePanel.querySelector(".tab-contents");

        // Each plugin can make multiple panels
        const configs = makeAboveTablePanelConfigs();

        configs.forEach((config, i) => {
          const nodeContentId = `${pluginName}_${config.id}_panel-content`;

          // quit if we've already registered this plugin
          // TODO: look into whether plugins should be allowed to ask
          // parent to re-render, or if they should manage that internally.
          if (document.getElementById(nodeContentId)) {
            return;
          }

          // Add tab control button
          const pluginControl = document.createElement("button");
          pluginControl.textContent = config.label;
          pluginControl.onclick = () => {
            contents.childNodes.forEach((node) => {
              if (node.id === nodeContentId) {
                node.style.display = "block";
              } else {
                node.style.display = "none";
              }
            });
          };
          controls.appendChild(pluginControl);

          // Add plugin content area
          const pluginNode = document.createElement("div");
          pluginNode.id = nodeContentId;
          config.render(pluginNode);
          pluginNode.style.display = "none"; // Default to hidden unless it's the first panel

          contents.appendChild(pluginNode);
        });

        // Let first node be selected by default
        if (contents.childNodes.length) {
          contents.childNodes[0].style.display = "block";
        }
      }
    });
  },
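
  // Illustrative sketch (not part of the original file): the panel config
  // shape consumed by renderAboveTablePanel above. The plugin name, id and
  // label are hypothetical; render receives the panel's container node:
  //
  //   datasetteManager.registerPlugin("example-panel-plugin", {
  //     version: 0.1,
  //     makeAboveTablePanelConfigs: () => [
  //       {
  //         id: "hello",          // used to build the panel node's DOM id
  //         label: "Hello panel", // text for the tab control button
  //         render: (node) => {
  //           node.textContent = "Hello from a panel plugin";
  //         },
  //       },
  //     ],
  //   });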

  /** Selectors for document (DOM) elements. Store identifier instead of immediate references in case they haven't loaded when Manager starts. */
  selectors: DOM_SELECTORS,

  // Future API ideas
  // Fetch page's data in array, and cache so plugins could reuse it
  // Provide knowledge of what's available in datasette JS or server-side via traditional console autocomplete
  // State helpers: URL params https://github.com/simonw/datasette/issues/1144 and localstorage
  // UI Hooks: command + k, tab manager hook
  // Should we notify plugins that have dependencies
  // when all dependencies were fulfilled? (leaflet, codemirror, etc)
  // https://github.com/simonw/datasette-leaflet -> this way
  // multiple plugins can all request the same copy of leaflet.
};

const initializeDatasette = () => {
  // Hide the global behind __ prefix. Ideally they should be listening for the
  // DATASETTE_EVENTS.INIT event to avoid the habit of reading from the window.

  window.__DATASETTE__ = datasetteManager;
  console.debug("Datasette Manager Created!");

  const initDatasetteEvent = new CustomEvent(DATASETTE_EVENTS.INIT, {
    detail: datasetteManager,
  });

  document.dispatchEvent(initDatasetteEvent);
};

/**
 * Main function
 * Fires AFTER the document has been parsed
 */
document.addEventListener("DOMContentLoaded", function () {
  initializeDatasette();
});
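
// Illustrative sketch (not part of the original file): consumers can avoid
// reading window.__DATASETTE__ directly by waiting for the init event, which
// carries the manager instance in evt.detail. The plugin name is hypothetical.
document.addEventListener(DATASETTE_EVENTS.INIT, (evt) => {
  const manager = evt.detail;
  manager.registerPlugin("example-init-listener", { version: 0.1 });
});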

@@ -7,8 +7,8 @@ MIT Licensed
  typeof exports === "object" && typeof module !== "undefined"
    ? (module.exports = factory())
    : typeof define === "function" && define.amd
      ? define(factory)
      : (global.jsonFormatHighlight = factory());
    ? define(factory)
    : (global.jsonFormatHighlight = factory());
})(this, function () {
  "use strict";

@@ -42,13 +42,13 @@ MIT Licensed
        color = /true/.test(match)
          ? colors.trueColor
          : /false/.test(match)
            ? colors.falseColor
            : /null/.test(match)
              ? colors.nullColor
              : color;
          ? colors.falseColor
          : /null/.test(match)
          ? colors.nullColor
          : color;
      }
      return '<span style="color: ' + color + '">' + match + "</span>";
    },
  }
  );
}

@@ -1,416 +0,0 @@
class NavigationSearch extends HTMLElement {
  constructor() {
    super();
    this.attachShadow({ mode: "open" });
    this.selectedIndex = -1;
    this.matches = [];
    this.debounceTimer = null;

    this.render();
    this.setupEventListeners();
  }

  render() {
    this.shadowRoot.innerHTML = `
      <style>
        :host {
          display: contents;
        }

        dialog {
          border: none;
          border-radius: 0.75rem;
          padding: 0;
          max-width: 90vw;
          width: 600px;
          max-height: 80vh;
          box-shadow: 0 20px 25px -5px rgba(0, 0, 0, 0.1), 0 10px 10px -5px rgba(0, 0, 0, 0.04);
          animation: slideIn 0.2s ease-out;
        }

        dialog::backdrop {
          background: rgba(0, 0, 0, 0.5);
          backdrop-filter: blur(4px);
          animation: fadeIn 0.2s ease-out;
        }

        @keyframes slideIn {
          from {
            opacity: 0;
            transform: translateY(-20px) scale(0.95);
          }
          to {
            opacity: 1;
            transform: translateY(0) scale(1);
          }
        }

        @keyframes fadeIn {
          from { opacity: 0; }
          to { opacity: 1; }
        }

        .search-container {
          display: flex;
          flex-direction: column;
          height: 100%;
        }

        .search-input-wrapper {
          padding: 1.25rem;
          border-bottom: 1px solid #e5e7eb;
        }

        .search-input {
          width: 100%;
          padding: 0.75rem 1rem;
          font-size: 1rem;
          border: 2px solid #e5e7eb;
          border-radius: 0.5rem;
          outline: none;
          transition: border-color 0.2s;
          box-sizing: border-box;
        }

        .search-input:focus {
          border-color: #2563eb;
        }

        .results-container {
          overflow-y: auto;
          height: calc(80vh - 180px);
          padding: 0.5rem;
        }

        .result-item {
          padding: 0.875rem 1rem;
          cursor: pointer;
          border-radius: 0.5rem;
          transition: background-color 0.15s;
          display: flex;
          align-items: center;
          gap: 0.75rem;
        }

        .result-item:hover {
          background-color: #f3f4f6;
        }

        .result-item.selected {
          background-color: #dbeafe;
        }

        .result-name {
          font-weight: 500;
          color: #111827;
        }

        .result-url {
          font-size: 0.875rem;
          color: #6b7280;
        }

        .no-results {
          padding: 2rem;
          text-align: center;
          color: #6b7280;
        }

        .hint-text {
          padding: 0.75rem 1.25rem;
          font-size: 0.875rem;
          color: #6b7280;
          border-top: 1px solid #e5e7eb;
          display: flex;
          gap: 1rem;
          flex-wrap: wrap;
        }

        .hint-text kbd {
          background: #f3f4f6;
          padding: 0.125rem 0.375rem;
          border-radius: 0.25rem;
          font-size: 0.75rem;
          border: 1px solid #d1d5db;
          font-family: monospace;
        }

        /* Mobile optimizations */
        @media (max-width: 640px) {
          dialog {
            width: 95vw;
            max-height: 85vh;
            border-radius: 0.5rem;
          }

          .search-input-wrapper {
            padding: 1rem;
          }

          .search-input {
            font-size: 16px; /* Prevents zoom on iOS */
          }

          .result-item {
            padding: 1rem 0.75rem;
          }

          .hint-text {
            font-size: 0.8rem;
            padding: 0.5rem 1rem;
          }
        }
      </style>

      <dialog>
        <div class="search-container">
          <div class="search-input-wrapper">
            <input
              type="text"
              class="search-input"
              placeholder="Search..."
              aria-label="Search navigation"
              autocomplete="off"
              spellcheck="false"
            >
          </div>
          <div class="results-container" role="listbox"></div>
          <div class="hint-text">
            <span><kbd>↑</kbd> <kbd>↓</kbd> Navigate</span>
            <span><kbd>Enter</kbd> Select</span>
            <span><kbd>Esc</kbd> Close</span>
          </div>
        </div>
      </dialog>
    `;
  }

  setupEventListeners() {
    const dialog = this.shadowRoot.querySelector("dialog");
    const input = this.shadowRoot.querySelector(".search-input");
    const resultsContainer =
      this.shadowRoot.querySelector(".results-container");

    // Global keyboard listener for "/"
    document.addEventListener("keydown", (e) => {
      if (e.key === "/" && !this.isInputFocused() && !dialog.open) {
        e.preventDefault();
        this.openMenu();
      }
    });

    // Input event
    input.addEventListener("input", (e) => {
      this.handleSearch(e.target.value);
    });

    // Keyboard navigation
    input.addEventListener("keydown", (e) => {
      if (e.key === "ArrowDown") {
        e.preventDefault();
        this.moveSelection(1);
      } else if (e.key === "ArrowUp") {
        e.preventDefault();
        this.moveSelection(-1);
      } else if (e.key === "Enter") {
        e.preventDefault();
        this.selectCurrentItem();
      } else if (e.key === "Escape") {
        this.closeMenu();
      }
    });

    // Click on result item
    resultsContainer.addEventListener("click", (e) => {
      const item = e.target.closest(".result-item");
      if (item) {
        const index = parseInt(item.dataset.index);
        this.selectItem(index);
      }
    });

    // Close on backdrop click
    dialog.addEventListener("click", (e) => {
      if (e.target === dialog) {
        this.closeMenu();
      }
    });

    // Initial load
    this.loadInitialData();
  }

  isInputFocused() {
    const activeElement = document.activeElement;
    return (
      activeElement &&
      (activeElement.tagName === "INPUT" ||
        activeElement.tagName === "TEXTAREA" ||
        activeElement.isContentEditable)
    );
  }

  loadInitialData() {
    const itemsAttr = this.getAttribute("items");
    if (itemsAttr) {
      try {
        this.allItems = JSON.parse(itemsAttr);
        this.matches = this.allItems;
      } catch (e) {
        console.error("Failed to parse items attribute:", e);
        this.allItems = [];
        this.matches = [];
      }
    }
  }

  handleSearch(query) {
    clearTimeout(this.debounceTimer);

    this.debounceTimer = setTimeout(() => {
      const url = this.getAttribute("url");

      if (url) {
        // Fetch from API
        this.fetchResults(url, query);
      } else {
        // Filter local items
        this.filterLocalItems(query);
      }
    }, 200);
  }

  async fetchResults(url, query) {
    try {
      const searchUrl = `${url}?q=${encodeURIComponent(query)}`;
      const response = await fetch(searchUrl);
      const data = await response.json();
      this.matches = data.matches || [];
      this.selectedIndex = this.matches.length > 0 ? 0 : -1;
      this.renderResults();
    } catch (e) {
      console.error("Failed to fetch search results:", e);
      this.matches = [];
      this.renderResults();
    }
  }

  filterLocalItems(query) {
    if (!query.trim()) {
      this.matches = [];
    } else {
      const lowerQuery = query.toLowerCase();
      this.matches = (this.allItems || []).filter(
        (item) =>
          item.name.toLowerCase().includes(lowerQuery) ||
          item.url.toLowerCase().includes(lowerQuery),
      );
    }
    this.selectedIndex = this.matches.length > 0 ? 0 : -1;
    this.renderResults();
  }

  renderResults() {
    const container = this.shadowRoot.querySelector(".results-container");
    const input = this.shadowRoot.querySelector(".search-input");

    if (this.matches.length === 0) {
      const message = input.value.trim()
        ? "No results found"
        : "Start typing to search...";
      container.innerHTML = `<div class="no-results">${message}</div>`;
      return;
    }

    container.innerHTML = this.matches
      .map(
        (match, index) => `
      <div
        class="result-item ${
          index === this.selectedIndex ? "selected" : ""
        }"
        data-index="${index}"
        role="option"
        aria-selected="${index === this.selectedIndex}"
      >
        <div>
          <div class="result-name">${this.escapeHtml(
            match.name,
          )}</div>
          <div class="result-url">${this.escapeHtml(match.url)}</div>
        </div>
      </div>
    `,
      )
      .join("");

    // Scroll selected item into view
    if (this.selectedIndex >= 0) {
      const selectedItem = container.children[this.selectedIndex];
      if (selectedItem) {
        selectedItem.scrollIntoView({ block: "nearest" });
      }
    }
  }

  moveSelection(direction) {
    const newIndex = this.selectedIndex + direction;
    if (newIndex >= 0 && newIndex < this.matches.length) {
      this.selectedIndex = newIndex;
      this.renderResults();
    }
  }

  selectCurrentItem() {
    if (this.selectedIndex >= 0 && this.selectedIndex < this.matches.length) {
      this.selectItem(this.selectedIndex);
    }
  }

  selectItem(index) {
    const match = this.matches[index];
    if (match) {
      // Dispatch custom event
      this.dispatchEvent(
        new CustomEvent("select", {
          detail: match,
          bubbles: true,
          composed: true,
        }),
      );

      // Navigate to URL
      window.location.href = match.url;

      this.closeMenu();
    }
  }

  openMenu() {
    const dialog = this.shadowRoot.querySelector("dialog");
    const input = this.shadowRoot.querySelector(".search-input");

    dialog.showModal();
    input.value = "";
    input.focus();

    // Reset state - start with no items shown
    this.matches = [];
    this.selectedIndex = -1;
    this.renderResults();
  }

  closeMenu() {
    const dialog = this.shadowRoot.querySelector("dialog");
    dialog.close();
  }

  escapeHtml(text) {
    const div = document.createElement("div");
    div.textContent = text;
    return div.innerHTML;
  }
}

// Register the custom element
customElements.define("navigation-search", NavigationSearch);
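
// Illustrative sketch (not part of the original file): the element accepts a
// static list via its items attribute, or a url attribute pointing at an
// endpoint that returns {"matches": [{"name": ..., "url": ...}]}. The two
// items below are hypothetical.
const exampleSearch = document.createElement("navigation-search");
exampleSearch.setAttribute(
  "items",
  JSON.stringify([
    { name: "fixtures", url: "/fixtures" },
    { name: "searchable table", url: "/fixtures/searchable" },
  ]),
);
document.body.appendChild(exampleSearch);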

@@ -17,8 +17,7 @@ var DROPDOWN_ICON_SVG = `<svg xmlns="http://www.w3.org/2000/svg" width="14" heig
  <path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z"></path>
</svg>`;

/** Main initialization function for Datasette Table interactions */
const initDatasetteTable = function (manager) {
(function () {
  // Feature detection
  if (!window.URLSearchParams) {
    return;

@@ -69,11 +68,13 @@ const initDatasetteTable = function (manager) {
    menu.style.display = "none";
    menu.classList.remove("anim-scale-in");
  }

  const tableWrapper = document.querySelector(manager.selectors.tableWrapper);
  if (tableWrapper) {
    tableWrapper.addEventListener("scroll", closeMenu);
  }
  // When page loads, add scroll listener on .table-wrapper
  document.addEventListener("DOMContentLoaded", () => {
    var tableWrapper = document.querySelector(".table-wrapper");
    if (tableWrapper) {
      tableWrapper.addEventListener("scroll", closeMenu);
    }
  });
  document.body.addEventListener("click", (ev) => {
    /* was this click outside the menu? */
    var target = ev.target;

@@ -84,11 +85,9 @@ const initDatasetteTable = function (manager) {
      closeMenu();
    }
  });

  function onTableHeaderClick(ev) {
  function iconClicked(ev) {
    ev.preventDefault();
    ev.stopPropagation();
    menu.innerHTML = DROPDOWN_HTML;
    var th = ev.target;
    while (th.nodeName != "TH") {
      th = th.parentNode;

@@ -132,7 +131,7 @@ const initDatasetteTable = function (manager) {
    /* Only show "Facet by this" if it's not the first column, not selected,
       not a single PK and the Datasette allow_facet setting is True */
    var displayedFacets = Array.from(
      document.querySelectorAll(".facet-info"),
      document.querySelectorAll(".facet-info")
    ).map((el) => el.dataset.column);
    var isFirstColumn =
      th.parentElement.querySelector("th:first-of-type") == th;

@@ -152,7 +151,7 @@ const initDatasetteTable = function (manager) {
    }
    /* Show notBlank option if not selected AND at least one visible blank value */
    var tdsForThisColumn = Array.from(
      th.closest("table").querySelectorAll("td." + th.className),
      th.closest("table").querySelectorAll("td." + th.className)
    );
    if (
      params.get(`${column}__notblank`) != "1" &&

@@ -186,61 +185,7 @@ const initDatasetteTable = function (manager) {
    menu.style.left = menuLeft + "px";
    menu.style.display = "block";
    menu.classList.add("anim-scale-in");

    // Custom menu items on each render
    // Plugin hook: allow adding JS-based additional menu items
    const columnActionsPayload = {
      columnName: th.dataset.column,
      columnNotNull: th.dataset.columnNotNull === "1",
      columnType: th.dataset.columnType,
      isPk: th.dataset.isPk === "1",
    };
    const columnItemConfigs = manager.makeColumnActions(columnActionsPayload);

    const menuList = menu.querySelector("ul");
    columnItemConfigs.forEach((itemConfig) => {
      // Remove items from previous render. We assume entries have unique labels.
      const existingItems = menuList.querySelectorAll(`li`);
      Array.from(existingItems)
        .filter((item) => item.innerText === itemConfig.label)
        .forEach((node) => {
          node.remove();
        });

      const newLink = document.createElement("a");
      newLink.textContent = itemConfig.label;
      newLink.href = itemConfig.href ?? "#";
      if (itemConfig.onClick) {
        newLink.onclick = itemConfig.onClick;
      }

      // Attach new elements to DOM
      const menuItem = document.createElement("li");
      menuItem.appendChild(newLink);
      menuList.appendChild(menuItem);
    });

    // Measure width of menu and adjust position if too far right
    const menuWidth = menu.offsetWidth;
    const windowWidth = window.innerWidth;
    if (menuLeft + menuWidth > windowWidth) {
      menu.style.left = windowWidth - menuWidth - 20 + "px";
    }
    // Align menu .hook arrow with the column cog icon
    const hook = menu.querySelector(".hook");
    const icon = th.querySelector(".dropdown-menu-icon");
    const iconRect = icon.getBoundingClientRect();
    const hookLeft = iconRect.left - menuLeft + 1 + "px";
    hook.style.left = hookLeft;
    // Move the whole menu right if the hook is too far right
    const menuRect = menu.getBoundingClientRect();
    if (iconRect.right > menuRect.right) {
      menu.style.left = iconRect.right - menuWidth + "px";
      // And move hook tip as well
      hook.style.left = menuWidth - 13 + "px";
    }
  }

  var svg = document.createElement("div");
  svg.innerHTML = DROPDOWN_ICON_SVG;
  svg = svg.querySelector("*");

@@ -252,25 +197,23 @@ const initDatasetteTable = function (manager) {
  menu.style.display = "none";
  document.body.appendChild(menu);

  var ths = Array.from(
    document.querySelectorAll(manager.selectors.tableHeaders),
  );
  var ths = Array.from(document.querySelectorAll(".rows-and-columns th"));
  ths.forEach((th) => {
    if (!th.querySelector("a")) {
      return;
    }
    var icon = svg.cloneNode(true);
    icon.addEventListener("click", onTableHeaderClick);
    icon.addEventListener("click", iconClicked);
    th.appendChild(icon);
  });
};
})();

/* Add x buttons to the filter rows */
function addButtonsToFilterRows(manager) {
(function () {
  var x = "✖";
  var rows = Array.from(
    document.querySelectorAll(manager.selectors.filterRow),
  ).filter((el) => el.querySelector(".filter-op"));
  var rows = Array.from(document.querySelectorAll(".filter-row")).filter((el) =>
    el.querySelector(".filter-op")
  );
  rows.forEach((row) => {
    var a = document.createElement("a");
    a.setAttribute("href", "#");

@@ -291,18 +234,18 @@ function addButtonsToFilterRows(manager) {
      a.style.display = "none";
    }
  });
}
})();

/* Set up datalist autocomplete for filter values */
function initAutocompleteForFilterValues(manager) {
(function () {
  function createDataLists() {
    var facetResults = document.querySelectorAll(
      manager.selectors.facetResults,
      ".facet-results [data-column]"
    );
    Array.from(facetResults).forEach(function (facetResult) {
      // Use link text from all links in the facet result
      var links = Array.from(
        facetResult.querySelectorAll("li:not(.facet-truncated) a"),
        facetResult.querySelectorAll("li:not(.facet-truncated) a")
      );
      // Create a datalist element
      var datalist = document.createElement("datalist");

@@ -323,21 +266,9 @@ function initAutocompleteForFilterValues(manager) {
  document.body.addEventListener("change", function (event) {
    if (event.target.name === "_filter_column") {
      event.target
        .closest(manager.selectors.filterRow)
        .closest(".filter-row")
        .querySelector(".filter-value")
        .setAttribute("list", "datalist-" + event.target.value);
    }
  });
}

// Ensures Table UI is initialized only after the Manager is ready.
document.addEventListener("datasette_init", function (evt) {
  const { detail: manager } = evt;

  // Main table
  initDatasetteTable(manager);

  // Other UI functions with interactive JS needs
  addButtonsToFilterRows(manager);
  initAutocompleteForFilterValues(manager);
});
})();

@@ -1,28 +0,0 @@
{% if action_links %}
<div class="page-action-menu">
  <details class="actions-menu-links details-menu">
    <summary>
      <div class="icon-text">
        <svg class="icon" aria-labelledby="actions-menu-links-title" role="img" style="color: #fff" xmlns="http://www.w3.org/2000/svg" width="28" height="28" viewBox="0 0 28 28" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
          <title id="actions-menu-links-title">{{ action_title }}</title>
          <circle cx="12" cy="12" r="3"></circle>
          <path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z"></path>
        </svg>
        <span>{{ action_title }}</span>
      </div>
    </summary>
    <div class="dropdown-menu">
      <div class="hook"></div>
      <ul>
        {% for link in action_links %}
        <li><a href="{{ link.href }}">{{ link.label }}
          {% if link.description %}
          <p class="dropdown-description">{{ link.description }}</p>
          {% endif %}</a>
        </li>
        {% endfor %}
      </ul>
    </div>
  </details>
</div>
{% endif %}

@@ -1,50 +0,0 @@
<script>
// Common utility functions for debug pages

// Populate form from URL parameters on page load
function populateFormFromURL() {
  const params = new URLSearchParams(window.location.search);

  const action = params.get('action');
  if (action) {
    const actionField = document.getElementById('action');
    if (actionField) {
      actionField.value = action;
    }
  }

  const parent = params.get('parent');
  if (parent) {
    const parentField = document.getElementById('parent');
    if (parentField) {
      parentField.value = parent;
    }
  }

  const child = params.get('child');
  if (child) {
    const childField = document.getElementById('child');
    if (childField) {
      childField.value = child;
    }
  }

  const pageSize = params.get('page_size');
  if (pageSize) {
    const pageSizeField = document.getElementById('page_size');
    if (pageSizeField) {
      pageSizeField.value = pageSize;
    }
  }

  return params;
}

// HTML escape function
function escapeHtml(text) {
  if (text === null || text === undefined) return '';
  const div = document.createElement('div');
  div.textContent = text;
  return div.innerHTML;
}
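
// Illustrative usage (not part of the original file): escapeHtml neutralizes
// markup before a value is interpolated into innerHTML.
console.assert(escapeHtml("<b>&</b>") === "&lt;b&gt;&amp;&lt;/b&gt;");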
</script>

@@ -1,6 +1,6 @@
{% if metadata.get("description_html") or metadata.get("description") %}
{% if metadata.description_html or metadata.description %}
  <div class="metadata-description">
    {% if metadata.get("description_html") %}
    {% if metadata.description_html %}
      {{ metadata.description_html|safe }}
    {% else %}
      {{ metadata.description }}

@@ -1,145 +0,0 @@
<style>
.permission-form {
  background-color: #f5f5f5;
  border: 1px solid #ddd;
  border-radius: 5px;
  padding: 1.5em;
  margin-bottom: 2em;
}
.form-section {
  margin-bottom: 1em;
}
.form-section label {
  display: block;
  margin-bottom: 0.3em;
  font-weight: bold;
}
.form-section input[type="text"],
.form-section select {
  width: 100%;
  max-width: 500px;
  padding: 0.5em;
  box-sizing: border-box;
  border: 1px solid #ccc;
  border-radius: 3px;
}
.form-section input[type="text"]:focus,
.form-section select:focus {
  outline: 2px solid #0066cc;
  border-color: #0066cc;
}
.form-section small {
  display: block;
  margin-top: 0.3em;
  color: #666;
}
.form-actions {
  margin-top: 1em;
}
.submit-btn {
  padding: 0.6em 1.5em;
  font-size: 1em;
  background-color: #0066cc;
  color: white;
  border: none;
  border-radius: 3px;
  cursor: pointer;
}
.submit-btn:hover {
  background-color: #0052a3;
}
.submit-btn:disabled {
  background-color: #ccc;
  cursor: not-allowed;
}
.results-container {
  margin-top: 2em;
}
.results-header {
  display: flex;
  justify-content: space-between;
  align-items: center;
  margin-bottom: 1em;
}
.results-count {
  font-size: 0.9em;
  color: #666;
}
.results-table {
  width: 100%;
  border-collapse: collapse;
  background-color: white;
  box-shadow: 0 1px 3px rgba(0,0,0,0.1);
}
.results-table th {
  background-color: #f5f5f5;
  padding: 0.75em;
  text-align: left;
  font-weight: bold;
  border-bottom: 2px solid #ddd;
}
.results-table td {
  padding: 0.75em;
  border-bottom: 1px solid #eee;
}
.results-table tr:hover {
  background-color: #f9f9f9;
}
.results-table tr.allow-row {
  background-color: #f1f8f4;
}
.results-table tr.allow-row:hover {
  background-color: #e8f5e9;
}
.results-table tr.deny-row {
  background-color: #fef5f5;
}
.results-table tr.deny-row:hover {
  background-color: #ffebee;
}
.resource-path {
  font-family: monospace;
  background-color: #f5f5f5;
  padding: 0.2em 0.4em;
  border-radius: 3px;
}
.pagination {
  margin-top: 1.5em;
  display: flex;
  gap: 1em;
  align-items: center;
}
.pagination a {
  padding: 0.5em 1em;
  background-color: #0066cc;
  color: white;
  text-decoration: none;
  border-radius: 3px;
}
.pagination a:hover {
  background-color: #0052a3;
}
.pagination span {
  color: #666;
}
.no-results {
  padding: 2em;
  text-align: center;
  color: #666;
  background-color: #f9f9f9;
  border: 1px solid #ddd;
  border-radius: 5px;
}
.error-message {
  padding: 1em;
  background-color: #ffebee;
  border: 2px solid #f44336;
  border-radius: 5px;
  color: #c62828;
}
.loading {
  padding: 2em;
  text-align: center;
  color: #666;
}
</style>

@@ -1,54 +0,0 @@
{% if has_debug_permission %}
{% set query_string = '?' + request.query_string if request.query_string else '' %}

<style>
.permissions-debug-tabs {
  border-bottom: 2px solid #e0e0e0;
  margin-bottom: 2em;
  display: flex;
  flex-wrap: wrap;
  gap: 0.5em;
}
.permissions-debug-tabs a {
  padding: 0.75em 1.25em;
  text-decoration: none;
  color: #333;
  border-bottom: 3px solid transparent;
  margin-bottom: -2px;
  transition: all 0.2s;
  font-weight: 500;
}
.permissions-debug-tabs a:hover {
  background-color: #f5f5f5;
  border-bottom-color: #999;
}
.permissions-debug-tabs a.active {
  color: #0066cc;
  border-bottom-color: #0066cc;
  background-color: #f0f7ff;
}
@media only screen and (max-width: 576px) {
  .permissions-debug-tabs {
    flex-direction: column;
    gap: 0;
  }
  .permissions-debug-tabs a {
    border-bottom: 1px solid #e0e0e0;
    margin-bottom: 0;
  }
  .permissions-debug-tabs a.active {
    border-left: 3px solid #0066cc;
    border-bottom: 1px solid #e0e0e0;
  }
}
</style>

<nav class="permissions-debug-tabs">
  <a href="{{ urls.path('-/permissions') }}" {% if current_tab == "permissions" %}class="active"{% endif %}>Playground</a>
  <a href="{{ urls.path('-/check') }}{{ query_string }}" {% if current_tab == "check" %}class="active"{% endif %}>Check</a>
  <a href="{{ urls.path('-/allowed') }}{{ query_string }}" {% if current_tab == "allowed" %}class="active"{% endif %}>Allowed</a>
  <a href="{{ urls.path('-/rules') }}{{ query_string }}" {% if current_tab == "rules" %}class="active"{% endif %}>Rules</a>
  <a href="{{ urls.path('-/actions') }}" {% if current_tab == "actions" %}class="active"{% endif %}>Actions</a>
  <a href="{{ urls.path('-/allow-debug') }}" {% if current_tab == "allow_debug" %}class="active"{% endif %}>Allow debug</a>
</nav>
{% endif %}

@@ -1,3 +1,3 @@
<p class="suggested-facets">
  Suggested facets: {% for facet in suggested_facets %}<a href="{{ facet.toggle_url }}#facet-{{ facet.name|to_css_class }}">{{ facet.name }}</a>{% if facet.get("type") %} ({{ facet.type }}){% endif %}{% if not loop.last %}, {% endif %}{% endfor %}
  Suggested facets: {% for facet in suggested_facets %}<a href="{{ facet.toggle_url }}#facet-{{ facet.name|to_css_class }}">{{ facet.name }}</a>{% if facet.type %} ({{ facet.type }}){% endif %}{% if not loop.last %}, {% endif %}{% endfor %}
</p>

@@ -1,5 +1,3 @@
<!-- above-table-panel is a hook node for plugins to attach to. Displays even if no data available -->
<div class="above-table-panel"> </div>
{% if display_rows %}
<div class="table-wrapper">
  <table class="rows-and-columns">

@@ -33,12 +33,9 @@ p.message-warning {

<h1>Debug allow rules</h1>

{% set current_tab = "allow_debug" %}
{% include "_permissions_debug_tabs.html" %}

<p>Use this tool to try out different actor and allow combinations. See <a href="https://docs.datasette.io/en/stable/authentication.html#defining-permissions-with-allow-blocks">Defining permissions with "allow" blocks</a> for documentation.</p>

<form class="core" action="{{ urls.path('-/allow-debug') }}" method="get" style="margin-bottom: 1em">
<form action="{{ urls.path('-/allow-debug') }}" method="get" style="margin-bottom: 1em">
  <div class="two-col">
    <p><label>Allow block</label></p>
    <textarea name="allow">{{ allow_input }}</textarea>

@@ -8,7 +7,7 @@

{% block content %}

<h1>API Explorer{% if private %} 🔒{% endif %}</h1>
<h1>API Explorer</h1>

<p>Use this tool to try out the
{% if datasette_version %}

@@ -19,7 +19,7 @@
</p>
<details open style="border: 2px solid #ccc; border-bottom: none; padding: 0.5em">
  <summary style="cursor: pointer;">GET</summary>
  <form class="core" method="get" id="api-explorer-get" style="margin-top: 0.7em">
  <form method="get" id="api-explorer-get" style="margin-top: 0.7em">
    <div>
      <label for="path">API path:</label>
      <input type="text" id="path" name="path" style="width: 60%">

@@ -29,7 +29,7 @@
</details>
<details style="border: 2px solid #ccc; padding: 0.5em">
  <summary style="cursor: pointer">POST</summary>
  <form class="core" method="post" id="api-explorer-post" style="margin-top: 0.7em">
  <form method="post" id="api-explorer-post" style="margin-top: 0.7em">
    <div>
      <label for="path">API path:</label>
      <input type="text" id="path" name="path" style="width: 60%">

@@ -1,16 +1,14 @@
{% import "_crumbs.html" as crumbs with context %}<!DOCTYPE html>
<html lang="en">
<html>
<head>
  <title>{% block title %}{% endblock %}</title>
  <link rel="stylesheet" href="{{ urls.static('app.css') }}?{{ app_css_hash }}">
  <meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
  {% for url in extra_css_urls %}
    <link rel="stylesheet" href="{{ url.url }}"{% if url.get("sri") %} integrity="{{ url.sri }}" crossorigin="anonymous"{% endif %}>
    <link rel="stylesheet" href="{{ url.url }}"{% if url.sri %} integrity="{{ url.sri }}" crossorigin="anonymous"{% endif %}>
  {% endfor %}
  <script>window.datasetteVersion = '{{ datasette_version }}';</script>
  <script src="{{ urls.static('datasette-manager.js') }}" defer></script>
  {% for url in extra_js_urls %}
    <script {% if url.module %}type="module" {% endif %}src="{{ url.url }}"{% if url.get("sri") %} integrity="{{ url.sri }}" crossorigin="anonymous"{% endif %}></script>
    <script {% if url.module %}type="module" {% endif %}src="{{ url.url }}"{% if url.sri %} integrity="{{ url.sri }}" crossorigin="anonymous"{% endif %}></script>
  {% endfor %}
  {%- if alternate_url_json -%}
  <link rel="alternate" type="application/json+datasette" href="{{ alternate_url_json }}">

@@ -19,7 +17,7 @@
</head>
<body class="{% block body_class %}{% endblock %}">
<div class="not-footer">
<header class="hd"><nav>{% block nav %}{% block crumbs %}{{ crumbs.nav(request=request) }}{% endblock %}
<header><nav>{% block nav %}{% block crumbs %}{{ crumbs.nav(request=request) }}{% endblock %}
{% set links = menu_links() %}{% if links or show_logout %}
<details class="nav-menu details-menu">
  <summary><svg aria-labelledby="nav-menu-svg-title" role="img"

@@ -37,7 +35,7 @@
  </ul>
  {% endif %}
  {% if show_logout %}
  <form class="nav-menu-logout" action="{{ urls.logout() }}" method="post">
  <form action="{{ urls.logout() }}" method="post">
    <input type="hidden" name="csrftoken" value="{{ csrftoken() }}">
    <button class="button-as-link">Log out</button>
  </form>{% endif %}

@@ -72,7 +70,5 @@
{% endfor %}

{% if select_templates %}<!-- Templates considered: {{ select_templates|join(", ") }} -->{% endif %}
<script src="{{ urls.static('navigation-search.js') }}" defer></script>
<navigation-search url="/-/tables"></navigation-search>
</body>
</html>

@@ -2,36 +2,11 @@

{% block title %}Create an API token{% endblock %}

{% block extra_head %}
<style type="text/css">
#restrict-permissions label {
  display: inline;
  width: 90%;
}
</style>
{% endblock %}

{% block content %}

<h1>Create an API token</h1>

<p>This token will allow API access with the same abilities as your current user, <strong>{{ request.actor.id }}</strong></p>

{% if token %}
  <div>
    <h2>Your API token</h2>
    <form>
      <input type="text" class="copyable" style="width: 40%" value="{{ token }}">
      <span class="copy-link-wrapper"></span>
    </form>
    <!--- show token in a <details> -->
    <details style="margin-top: 1em">
      <summary>Token details</summary>
      <pre>{{ token_bits|tojson(4) }}</pre>
    </details>
  </div>
  <h2>Create another token</h2>
{% endif %}
<p>This token will allow API access with the same abilities as your current user.</p>

{% if errors %}
  {% for error in errors %}

@@ -39,7 +14,7 @@
  {% endfor %}
{% endif %}

<form class="core" action="{{ urls.path('-/create-token') }}" method="post">
<form action="{{ urls.path('-/create-token') }}" method="post">
  <div>
    <div class="select-wrapper" style="width: unset">
      <select name="expire_type">

@@ -52,39 +27,23 @@
    <input type="text" name="expire_duration" style="width: 10%">
    <input type="hidden" name="csrftoken" value="{{ csrftoken() }}">
    <input type="submit" value="Create token">

    <details style="margin-top: 1em" id="restrict-permissions">
      <summary style="cursor: pointer;">Restrict actions that can be performed using this token</summary>
      <h2>All databases and tables</h2>
      <ul>
        {% for permission in all_actions %}
        <li><label><input type="checkbox" name="all:{{ permission }}"> {{ permission }}</label></li>
        {% endfor %}
      </ul>

      {% for database in database_with_tables %}
      <h2>All tables in "{{ database.name }}"</h2>
      <ul>
        {% for permission in database_actions %}
        <li><label><input type="checkbox" name="database:{{ database.encoded }}:{{ permission }}"> {{ permission }}</label></li>
        {% endfor %}
      </ul>
      {% endfor %}
      <h2>Specific tables</h2>
      {% for database in database_with_tables %}
        {% for table in database.tables %}
        <h3>{{ database.name }}: {{ table.name }}</h3>
        <ul>
          {% for permission in child_actions %}
          <li><label><input type="checkbox" name="resource:{{ database.encoded }}:{{ table.encoded }}:{{ permission }}"> {{ permission }}</label></li>
          {% endfor %}
        </ul>
        {% endfor %}
      {% endfor %}
    </details>

  </div>
</form>
</div>

{% if token %}
<div>
  <h2>Your API token</h2>
  <form>
    <input type="text" class="copyable" style="width: 40%" value="{{ token }}">
    <span class="copy-link-wrapper"></span>
  </form>
  <!--- show token in a <details> -->
  <details style="margin-top: 1em">
    <summary>Token details</summary>
    <pre>{{ token_bits|tojson }}</pre>
  </details>
</div>
{% endif %}

<script>
var expireDuration = document.querySelector('input[name="expire_duration"]');

@@ -1,13 +0,0 @@
{% extends "base.html" %}
{% block title %}CSRF check failed{% endblock %}
{% block content %}
<h1>Form origin check failed</h1>

<p>Your request's origin could not be validated. Please return to the form and submit it again.</p>

<details><summary>Technical details</summary>
<p>Developers: consult Datasette's <a href="https://docs.datasette.io/en/latest/internals.html#csrf-protection">CSRF protection documentation</a>.</p>
<p>Error code is {{ message_name }}.</p>
</details>

{% endblock %}

@@ -9,23 +9,35 @@

{% block body_class %}db db-{{ database|to_css_class }}{% endblock %}

{% block crumbs %}
{{ crumbs.nav(request=request, database=database) }}
{% endblock %}

{% block content %}
<div class="page-header" style="border-color: #{{ database_color }}">
<div class="page-header" style="border-color: #{{ database_color(database) }}">
  <h1>{{ metadata.title or database }}{% if private %} 🔒{% endif %}</h1>
</div>
{% set action_links, action_title = database_actions(), "Database actions" %}
{% include "_action_menu.html" %}

{{ top_database() }}
{% set links = database_actions() %}{% if links %}
<details class="actions-menu-links details-menu">
  <summary><svg aria-labelledby="actions-menu-links-title" role="img"
      style="color: #666" xmlns="http://www.w3.org/2000/svg"
      width="28" height="28" viewBox="0 0 24 24" fill="none"
      stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
    <title id="actions-menu-links-title">Table actions</title>
    <circle cx="12" cy="12" r="3"></circle>
    <path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z"></path>
  </svg></summary>
  <div class="dropdown-menu">
    {% if links %}
    <ul>
      {% for link in links %}
      <li><a href="{{ link.href }}">{{ link.label }}</a></li>
      {% endfor %}
    </ul>
    {% endif %}
  </div>
</details>{% endif %}
</div>

{% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %}

{% if allow_execute_sql %}
<form class="sql core" action="{{ urls.database(database) }}/-/query" method="get">
<form class="sql" action="{{ urls.database(database) }}" method="get">
  <h3>Custom SQL query</h3>
  <p><textarea id="sql-editor" name="sql">{% if tables %}select * from {{ tables[0].name|escape_sqlite }}{% else %}select sqlite_version(){% endif %}</textarea></p>
  <p>

@@ -40,7 +52,7 @@
  <p>The following databases are attached to this connection, and can be used for cross-database joins:</p>
  <ul class="bullets">
    {% for db_name in attached_databases %}
    <li><strong>{{ db_name }}</strong> - <a href="{{ urls.database(db_name) }}/-/query?sql=select+*+from+[{{ db_name }}].sqlite_master+where+type='table'">tables</a></li>
    <li><strong>{{ db_name }}</strong> - <a href="?sql=select+*+from+[{{ db_name }}].sqlite_master+where+type='table'">tables</a></li>
    {% endfor %}
  </ul>
</div>

@@ -56,7 +68,7 @@
{% endif %}

{% if tables %}
<h2 id="tables">Tables <a style="font-weight: normal; font-size: 0.75em; padding-left: 0.5em;" href="{{ urls.database(database) }}/-/schema">schema</a></h2>
<h2 id="tables">Tables</h2>
{% endif %}

{% for table in tables %}

@@ -64,7 +76,7 @@
<div class="db-table">
  <h3><a href="{{ urls.table(database, table.name) }}">{{ table.name }}</a>{% if table.private %} 🔒{% endif %}{% if table.hidden %}<em> (hidden)</em>{% endif %}</h3>
  <p><em>{% for column in table.columns %}{{ column }}{% if not loop.last %}, {% endif %}{% endfor %}</em></p>
  <p>{% if table.count is none %}Many rows{% elif table.count == count_limit + 1 %}>{{ "{:,}".format(count_limit) }} rows{% else %}{{ "{:,}".format(table.count) }} row{% if table.count == 1 %}{% else %}s{% endif %}{% endif %}</p>
  <p>{% if table.count is none %}Many rows{% else %}{{ "{:,}".format(table.count) }} row{% if table.count == 1 %}{% else %}s{% endif %}{% endif %}</p>
</div>
{% endif %}
{% endfor %}

@@ -83,7 +95,7 @@
{% endif %}

{% if allow_download %}
<p class="download-sqlite">Download SQLite DB: <a href="{{ urls.database(database) }}.db" rel="nofollow">{{ database }}.db</a> <em>{{ format_bytes(size) }}</em></p>
<p class="download-sqlite">Download SQLite DB: <a href="{{ urls.database(database) }}.db">{{ database }}.db</a> <em>{{ format_bytes(size) }}</em></p>
{% endif %}

{% include "_codemirror_foot.html" %}

@@ -1,43 +0,0 @@
{% extends "base.html" %}

{% block title %}Registered Actions{% endblock %}

{% block content %}
<h1>Registered actions</h1>

{% set current_tab = "actions" %}
{% include "_permissions_debug_tabs.html" %}

<p style="margin-bottom: 2em;">
  This Datasette instance has registered {{ data|length }} action{{ data|length != 1 and "s" or "" }}.
  Actions are used by the permission system to control access to different features.
</p>

<table class="rows-and-columns">
  <thead>
    <tr>
      <th>Name</th>
      <th>Abbr</th>
      <th>Description</th>
      <th>Resource</th>
      <th>Takes Parent</th>
      <th>Takes Child</th>
      <th>Also Requires</th>
    </tr>
  </thead>
  <tbody>
    {% for action in data %}
    <tr>
      <td><strong>{{ action.name }}</strong></td>
      <td>{% if action.abbr %}<code>{{ action.abbr }}</code>{% endif %}</td>
      <td>{{ action.description or "" }}</td>
      <td>{% if action.resource_class %}<code>{{ action.resource_class }}</code>{% endif %}</td>
      <td>{% if action.takes_parent %}✓{% endif %}</td>
      <td>{% if action.takes_child %}✓{% endif %}</td>
      <td>{% if action.also_requires %}<code>{{ action.also_requires }}</code>{% endif %}</td>
    </tr>
    {% endfor %}
  </tbody>
</table>

{% endblock %}

@ -1,229 +0,0 @@
|
|||
{% extends "base.html" %}

{% block title %}Allowed Resources{% endblock %}

{% block extra_head %}
<script src="{{ base_url }}-/static/json-format-highlight-1.0.1.js"></script>
{% include "_permission_ui_styles.html" %}
{% include "_debug_common_functions.html" %}
{% endblock %}

{% block content %}
<h1>Allowed resources</h1>

{% set current_tab = "allowed" %}
{% include "_permissions_debug_tabs.html" %}

<p>Use this tool to check which resources the current actor is allowed to access for a given permission action. It queries the <code>/-/allowed.json</code> API endpoint.</p>

{% if request.actor %}
<p>Current actor: <strong>{{ request.actor.get("id", "anonymous") }}</strong></p>
{% else %}
<p>Current actor: <strong>anonymous (not logged in)</strong></p>
{% endif %}

<div class="permission-form">
<form id="allowed-form" method="get" action="{{ urls.path("-/allowed") }}">
<div class="form-section">
<label for="action">Action (permission name):</label>
<select id="action" name="action" required>
<option value="">Select an action...</option>
{% for action_name in supported_actions %}
<option value="{{ action_name }}">{{ action_name }}</option>
{% endfor %}
</select>
<small>Only certain actions are supported by this endpoint</small>
</div>

<div class="form-section">
<label for="parent">Filter by parent (optional):</label>
<input type="text" id="parent" name="parent" placeholder="e.g., database name">
<small>Filter results to a specific parent resource</small>
</div>

<div class="form-section">
<label for="child">Filter by child (optional):</label>
<input type="text" id="child" name="child" placeholder="e.g., table name">
<small>Filter results to a specific child resource (requires parent to be set)</small>
</div>

<div class="form-section">
<label for="page_size">Page size:</label>
<input type="number" id="page_size" name="page_size" value="50" min="1" max="200" style="max-width: 100px;">
<small>Number of results per page (max 200)</small>
</div>

<div class="form-actions">
<button type="submit" class="submit-btn" id="submit-btn">Check Allowed Resources</button>
</div>
</form>
</div>

<div id="results-container" style="display: none;">
<div class="results-header">
<h2>Results</h2>
<div class="results-count" id="results-count"></div>
</div>

<div id="results-content"></div>

<div id="pagination" class="pagination"></div>

<details style="margin-top: 2em;">
<summary style="cursor: pointer; font-weight: bold;">Raw JSON response</summary>
<pre id="raw-json" style="margin-top: 1em; padding: 1em; background-color: #f5f5f5; border: 1px solid #ddd; border-radius: 3px; overflow-x: auto;"></pre>
</details>
</div>

<script>
const form = document.getElementById('allowed-form');
const resultsContainer = document.getElementById('results-container');
const resultsContent = document.getElementById('results-content');
const resultsCount = document.getElementById('results-count');
const pagination = document.getElementById('pagination');
const submitBtn = document.getElementById('submit-btn');
const hasDebugPermission = {{ 'true' if has_debug_permission else 'false' }};

// Populate form on initial load
(function() {
const params = populateFormFromURL();
const action = params.get('action');
const page = params.get('page');
if (action) {
fetchResults(page ? parseInt(page) : 1);
}
})();

async function fetchResults(page = 1) {
submitBtn.disabled = true;
submitBtn.textContent = 'Loading...';

const formData = new FormData(form);
const params = new URLSearchParams();

for (const [key, value] of formData.entries()) {
if (value && key !== 'page_size') {
params.append(key, value);
}
}

const pageSize = document.getElementById('page_size').value || '50';
params.append('page', page.toString());
params.append('page_size', pageSize);

try {
const response = await fetch('{{ urls.path("-/allowed.json") }}?' + params.toString(), {
method: 'GET',
headers: {
'Accept': 'application/json',
}
});

const data = await response.json();

if (response.ok) {
displayResults(data);
} else {
displayError(data);
}
} catch (error) {
displayError({ error: error.message });
} finally {
submitBtn.disabled = false;
submitBtn.textContent = 'Check Allowed Resources';
}
}

function displayResults(data) {
resultsContainer.style.display = 'block';

// Update count
resultsCount.textContent = `Showing ${data.items.length} of ${data.total} total resources (page ${data.page})`;

// Display results table
if (data.items.length === 0) {
resultsContent.innerHTML = '<div class="no-results">No allowed resources found for this action.</div>';
} else {
let html = '<table class="results-table">';
html += '<thead><tr>';
html += '<th>Resource Path</th>';
html += '<th>Parent</th>';
html += '<th>Child</th>';
if (hasDebugPermission) {
html += '<th>Reason</th>';
}
html += '</tr></thead>';
html += '<tbody>';

for (const item of data.items) {
html += '<tr>';
html += `<td><span class="resource-path">${escapeHtml(item.resource || '/')}</span></td>`;
html += `<td>${escapeHtml(item.parent || '—')}</td>`;
html += `<td>${escapeHtml(item.child || '—')}</td>`;
if (hasDebugPermission) {
// Display reason as JSON array
let reasonHtml = '—';
if (item.reason && Array.isArray(item.reason)) {
reasonHtml = `<code>${escapeHtml(JSON.stringify(item.reason))}</code>`;
}
html += `<td>${reasonHtml}</td>`;
}
html += '</tr>';
}

html += '</tbody></table>';
resultsContent.innerHTML = html;
}

// Update pagination
pagination.innerHTML = '';
if (data.previous_url || data.next_url) {
if (data.previous_url) {
const prevLink = document.createElement('a');
prevLink.href = data.previous_url;
prevLink.textContent = '← Previous';
pagination.appendChild(prevLink);
}

const pageInfo = document.createElement('span');
pageInfo.textContent = `Page ${data.page}`;
pagination.appendChild(pageInfo);

if (data.next_url) {
const nextLink = document.createElement('a');
nextLink.href = data.next_url;
nextLink.textContent = 'Next →';
pagination.appendChild(nextLink);
}
}

// Update raw JSON
document.getElementById('raw-json').innerHTML = jsonFormatHighlight(data);
}

function displayError(data) {
resultsContainer.style.display = 'block';
resultsCount.textContent = '';
pagination.innerHTML = '';

resultsContent.innerHTML = `<div class="error-message">Error: ${escapeHtml(data.error || 'Unknown error')}</div>`;

document.getElementById('raw-json').innerHTML = jsonFormatHighlight(data);
}

// Disable child input if parent is empty
const parentInput = document.getElementById('parent');
const childInput = document.getElementById('child');

parentInput.addEventListener('input', () => {
childInput.disabled = !parentInput.value;
if (!parentInput.value) {
childInput.value = '';
}
});

// Initialize disabled state
childInput.disabled = !parentInput.value;
</script>

{% endblock %}
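
For reference, a minimal sketch of querying the same /-/allowed.json endpoint outside this page. The parameter names (action, parent, child, page, page_size) and the response fields (items, total, page) come from the form and script above; the base URL and the action value are placeholder assumptions:

import json
import urllib.parse
import urllib.request

# Hypothetical local instance and action name - adjust to your deployment
params = urllib.parse.urlencode({
    "action": "view-table",
    "parent": "fixtures",  # optional filter, as in the form above
    "page": 1,
    "page_size": 50,       # the form caps this at 200
})
with urllib.request.urlopen("http://127.0.0.1:8001/-/allowed.json?" + params) as resp:
    data = json.load(resp)
print("Showing", len(data["items"]), "of", data["total"], "resources (page", str(data["page"]) + ")")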

@@ -1,270 +0,0 @@
{% extends "base.html" %}

{% block title %}Permission Check{% endblock %}

{% block extra_head %}
<script src="{{ base_url }}-/static/json-format-highlight-1.0.1.js"></script>
{% include "_permission_ui_styles.html" %}
{% include "_debug_common_functions.html" %}
<style>
#output {
margin-top: 2em;
padding: 1em;
border-radius: 5px;
}
#output.allowed {
background-color: #e8f5e9;
border: 2px solid #4caf50;
}
#output.denied {
background-color: #ffebee;
border: 2px solid #f44336;
}
#output h2 {
margin-top: 0;
}
#output .result-badge {
display: inline-block;
padding: 0.3em 0.8em;
border-radius: 3px;
font-weight: bold;
font-size: 1.1em;
}
#output .allowed-badge {
background-color: #4caf50;
color: white;
}
#output .denied-badge {
background-color: #f44336;
color: white;
}
.details-section {
margin-top: 1em;
}
.details-section dt {
font-weight: bold;
margin-top: 0.5em;
}
.details-section dd {
margin-left: 1em;
}
</style>
{% endblock %}

{% block content %}
<h1>Permission check</h1>

{% set current_tab = "check" %}
{% include "_permissions_debug_tabs.html" %}

<p>Use this tool to test permission checks for the current actor. It queries the <code>/-/check.json</code> API endpoint.</p>

{% if request.actor %}
<p>Current actor: <strong>{{ request.actor.get("id", "anonymous") }}</strong></p>
{% else %}
<p>Current actor: <strong>anonymous (not logged in)</strong></p>
{% endif %}

<div class="permission-form">
<form id="check-form" method="get" action="{{ urls.path("-/check") }}">
<div class="form-section">
<label for="action">Action (permission name):</label>
<select id="action" name="action" required>
<option value="">Select an action...</option>
{% for action_name in sorted_actions %}
<option value="{{ action_name }}">{{ action_name }}</option>
{% endfor %}
</select>
<small>The permission action to check</small>
</div>

<div class="form-section">
<label for="parent">Parent resource (optional):</label>
<input type="text" id="parent" name="parent" placeholder="e.g., database name">
<small>For database-level permissions, specify the database name</small>
</div>

<div class="form-section">
<label for="child">Child resource (optional):</label>
<input type="text" id="child" name="child" placeholder="e.g., table name">
<small>For table-level permissions, specify the table name (requires parent)</small>
</div>

<div class="form-actions">
<button type="submit" class="submit-btn" id="submit-btn">Check Permission</button>
</div>
</form>
</div>

<div id="output" style="display: none;">
<h2>Result: <span class="result-badge" id="result-badge"></span></h2>

<dl class="details-section">
<dt>Action:</dt>
<dd id="result-action"></dd>

<dt>Resource Path:</dt>
<dd id="result-resource"></dd>

<dt>Actor ID:</dt>
<dd id="result-actor"></dd>

<div id="additional-details"></div>
</dl>

<details style="margin-top: 1em;">
<summary style="cursor: pointer; font-weight: bold;">Raw JSON response</summary>
<pre id="raw-json" style="margin-top: 1em; padding: 1em; background-color: #f5f5f5; border: 1px solid #ddd; border-radius: 3px; overflow-x: auto;"></pre>
</details>
</div>

<script>
const form = document.getElementById('check-form');
const output = document.getElementById('output');
const submitBtn = document.getElementById('submit-btn');

async function performCheck() {
submitBtn.disabled = true;
submitBtn.textContent = 'Checking...';

const formData = new FormData(form);
const params = new URLSearchParams();

for (const [key, value] of formData.entries()) {
if (value) {
params.append(key, value);
}
}

try {
const response = await fetch('{{ urls.path("-/check.json") }}?' + params.toString(), {
method: 'GET',
headers: {
'Accept': 'application/json',
}
});

const data = await response.json();

if (response.ok) {
displayResult(data);
} else {
displayError(data);
}
} catch (error) {
alert('Error: ' + error.message);
} finally {
submitBtn.disabled = false;
submitBtn.textContent = 'Check Permission';
}
}

// Populate form on initial load
(function() {
const params = populateFormFromURL();
const action = params.get('action');
if (action) {
performCheck();
}
})();

function displayResult(data) {
output.style.display = 'block';

// Set badge and styling
const resultBadge = document.getElementById('result-badge');
if (data.allowed) {
output.className = 'allowed';
resultBadge.className = 'result-badge allowed-badge';
resultBadge.textContent = 'ALLOWED ✓';
} else {
output.className = 'denied';
resultBadge.className = 'result-badge denied-badge';
resultBadge.textContent = 'DENIED ✗';
}

// Basic details
document.getElementById('result-action').textContent = data.action || 'N/A';
document.getElementById('result-resource').textContent = data.resource?.path || '/';
document.getElementById('result-actor').textContent = data.actor_id || 'anonymous';

// Additional details
const additionalDetails = document.getElementById('additional-details');
additionalDetails.innerHTML = '';

if (data.reason !== undefined) {
const dt = document.createElement('dt');
dt.textContent = 'Reason:';
const dd = document.createElement('dd');
dd.textContent = data.reason || 'N/A';
additionalDetails.appendChild(dt);
additionalDetails.appendChild(dd);
}

if (data.source_plugin !== undefined) {
const dt = document.createElement('dt');
dt.textContent = 'Source Plugin:';
const dd = document.createElement('dd');
dd.textContent = data.source_plugin || 'N/A';
additionalDetails.appendChild(dt);
additionalDetails.appendChild(dd);
}

if (data.used_default !== undefined) {
const dt = document.createElement('dt');
dt.textContent = 'Used Default:';
const dd = document.createElement('dd');
dd.textContent = data.used_default ? 'Yes' : 'No';
additionalDetails.appendChild(dt);
additionalDetails.appendChild(dd);
}

if (data.depth !== undefined) {
const dt = document.createElement('dt');
dt.textContent = 'Depth:';
const dd = document.createElement('dd');
dd.textContent = data.depth;
additionalDetails.appendChild(dt);
additionalDetails.appendChild(dd);
}

// Raw JSON
document.getElementById('raw-json').innerHTML = jsonFormatHighlight(data);

// Scroll to output
output.scrollIntoView({ behavior: 'smooth', block: 'nearest' });
}

function displayError(data) {
output.style.display = 'block';
output.className = 'denied';

const resultBadge = document.getElementById('result-badge');
resultBadge.className = 'result-badge denied-badge';
resultBadge.textContent = 'ERROR';

document.getElementById('result-action').textContent = 'N/A';
document.getElementById('result-resource').textContent = 'N/A';
document.getElementById('result-actor').textContent = 'N/A';

const additionalDetails = document.getElementById('additional-details');
additionalDetails.innerHTML = '<dt>Error:</dt><dd>' + (data.error || 'Unknown error') + '</dd>';

document.getElementById('raw-json').innerHTML = jsonFormatHighlight(data);

output.scrollIntoView({ behavior: 'smooth', block: 'nearest' });
}

// Disable child input if parent is empty
const parentInput = document.getElementById('parent');
const childInput = document.getElementById('child');

childInput.addEventListener('focus', () => {
if (!parentInput.value) {
alert('Please specify a parent resource first before adding a child resource.');
parentInput.focus();
}
});
</script>

{% endblock %}
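
A similar minimal sketch for the /-/check.json endpoint this template submits to. The query parameters and the response fields (allowed, reason, source_plugin, used_default, depth) mirror the ones the script above reads; the base URL and the action, parent, and child values are placeholders:

import json
import urllib.parse
import urllib.request

params = urllib.parse.urlencode({
    "action": "view-table",   # placeholder action name
    "parent": "fixtures",     # optional parent resource
    "child": "facetable",     # optional child resource (requires parent)
})
with urllib.request.urlopen("http://127.0.0.1:8001/-/check.json?" + params) as resp:
    result = json.load(resp)
# allowed is a boolean; reason/source_plugin/used_default/depth may also be present
print("ALLOWED" if result["allowed"] else "DENIED", "-", result.get("reason"))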

@@ -1,166 +0,0 @@
{% extends "base.html" %}

{% block title %}Debug permissions{% endblock %}

{% block extra_head %}
{% include "_permission_ui_styles.html" %}
<style type="text/css">
.check-result-true {
color: green;
}
.check-result-false {
color: red;
}
.check-result-no-opinion {
color: #aaa;
}
.check h2 {
font-size: 1em
}
.check-action, .check-when, .check-result {
font-size: 1.3em;
}
textarea {
height: 10em;
width: 95%;
box-sizing: border-box;
padding: 0.5em;
border: 2px dotted black;
}
.two-col {
display: inline-block;
width: 48%;
}
.two-col label {
width: 48%;
}
@media only screen and (max-width: 576px) {
.two-col {
width: 100%;
}
}
</style>
{% endblock %}

{% block content %}
<h1>Permission playground</h1>

{% set current_tab = "permissions" %}
{% include "_permissions_debug_tabs.html" %}

<p>This tool lets you simulate an actor and a permission check for that actor.</p>

<div class="permission-form">
<form action="{{ urls.path('-/permissions') }}" id="debug-post" method="post">
<input type="hidden" name="csrftoken" value="{{ csrftoken() }}">
<div class="two-col">
<div class="form-section">
<label>Actor</label>
<textarea name="actor">{% if actor_input %}{{ actor_input }}{% else %}{"id": "root"}{% endif %}</textarea>
</div>
</div>
<div class="two-col" style="vertical-align: top">
<div class="form-section">
<label for="permission">Action</label>
<select name="permission" id="permission">
{% for permission in permissions %}
<option value="{{ permission.name }}">{{ permission.name }}</option>
{% endfor %}
</select>
</div>
<div class="form-section">
<label for="resource_1">Parent</label>
<input type="text" id="resource_1" name="resource_1" placeholder="e.g., database name">
</div>
<div class="form-section">
<label for="resource_2">Child</label>
<input type="text" id="resource_2" name="resource_2" placeholder="e.g., table name">
</div>
</div>
<div class="form-actions">
<button type="submit" class="submit-btn">Simulate permission check</button>
</div>
<pre style="margin-top: 1em" id="debugResult"></pre>
</form>
</div>

<script>
var rawPerms = {{ permissions|tojson }};
var permissions = Object.fromEntries(rawPerms.map(p => [p.name, p]));
var permissionSelect = document.getElementById('permission');
var resource1 = document.getElementById('resource_1');
var resource2 = document.getElementById('resource_2');
var resource1Section = resource1.closest('.form-section');
var resource2Section = resource2.closest('.form-section');
function updateResourceVisibility() {
var permission = permissionSelect.value;
var {takes_parent, takes_child} = permissions[permission];
resource1Section.style.display = takes_parent ? 'block' : 'none';
resource2Section.style.display = takes_child ? 'block' : 'none';
}
permissionSelect.addEventListener('change', updateResourceVisibility);
updateResourceVisibility();

// When #debug-post form is submitted, use fetch() to POST data
var debugPost = document.getElementById('debug-post');
var debugResult = document.getElementById('debugResult');
debugPost.addEventListener('submit', function(ev) {
ev.preventDefault();
var formData = new FormData(debugPost);
fetch(debugPost.action, {
method: 'POST',
body: new URLSearchParams(formData),
headers: {
'Accept': 'application/json'
}
}).then(function(response) {
if (!response.ok) {
throw new Error('Request failed with status ' + response.status);
}
return response.json();
}).then(function(data) {
debugResult.innerText = JSON.stringify(data, null, 4);
}).catch(function(error) {
debugResult.innerText = JSON.stringify({ error: error.message }, null, 4);
});
});
</script>

<h1>Recent permissions checks</h1>

<p>
{% if filter != "all" %}<a href="?filter=all">All</a>{% else %}<strong>All</strong>{% endif %},
{% if filter != "exclude-yours" %}<a href="?filter=exclude-yours">Exclude yours</a>{% else %}<strong>Exclude yours</strong>{% endif %},
{% if filter != "only-yours" %}<a href="?filter=only-yours">Only yours</a>{% else %}<strong>Only yours</strong>{% endif %}
</p>

{% if permission_checks %}
<table class="rows-and-columns permission-checks-table" id="permission-checks-table">
<thead>
<tr>
<th>When</th>
<th>Action</th>
<th>Parent</th>
<th>Child</th>
<th>Actor</th>
<th>Result</th>
</tr>
</thead>
<tbody>
{% for check in permission_checks %}
<tr>
<td><span style="font-size: 0.8em">{{ check.when.split('T', 1)[0] }}</span><br>{{ check.when.split('T', 1)[1].split('+', 1)[0].split('-', 1)[0].split('Z', 1)[0] }}</td>
<td><code>{{ check.action }}</code></td>
<td>{{ check.parent or '—' }}</td>
<td>{{ check.child or '—' }}</td>
<td>{% if check.actor %}<code>{{ check.actor|tojson }}</code>{% else %}<span class="check-actor-anon">anonymous</span>{% endif %}</td>
<td>{% if check.result %}<span class="check-result check-result-true">Allowed</span>{% elif check.result is none %}<span class="check-result check-result-no-opinion">No opinion</span>{% else %}<span class="check-result check-result-false">Denied</span>{% endif %}</td>
</tr>
{% endfor %}
</tbody>
</table>
{% else %}
<p class="no-results">No permission checks have been recorded yet.</p>
{% endif %}

{% endblock %}

@@ -1,203 +0,0 @@
{% extends "base.html" %}

{% block title %}Permission Rules{% endblock %}

{% block extra_head %}
<script src="{{ base_url }}-/static/json-format-highlight-1.0.1.js"></script>
{% include "_permission_ui_styles.html" %}
{% include "_debug_common_functions.html" %}
{% endblock %}

{% block content %}
<h1>Permission rules</h1>

{% set current_tab = "rules" %}
{% include "_permissions_debug_tabs.html" %}

<p>Use this tool to view the permission rules that allow the current actor to access resources for a given permission action. It queries the <code>/-/rules.json</code> API endpoint.</p>

{% if request.actor %}
<p>Current actor: <strong>{{ request.actor.get("id", "anonymous") }}</strong></p>
{% else %}
<p>Current actor: <strong>anonymous (not logged in)</strong></p>
{% endif %}

<div class="permission-form">
<form id="rules-form" method="get" action="{{ urls.path("-/rules") }}">
<div class="form-section">
<label for="action">Action (permission name):</label>
<select id="action" name="action" required>
<option value="">Select an action...</option>
{% for action_name in sorted_actions %}
<option value="{{ action_name }}">{{ action_name }}</option>
{% endfor %}
</select>
<small>The permission action to check</small>
</div>

<div class="form-section">
<label for="page_size">Page size:</label>
<input type="number" id="page_size" name="page_size" value="50" min="1" max="200" style="max-width: 100px;">
<small>Number of results per page (max 200)</small>
</div>

<div class="form-actions">
<button type="submit" class="submit-btn" id="submit-btn">View Permission Rules</button>
</div>
</form>
</div>

<div id="results-container" style="display: none;">
<div class="results-header">
<h2>Results</h2>
<div class="results-count" id="results-count"></div>
</div>

<div id="results-content"></div>

<div id="pagination" class="pagination"></div>

<details style="margin-top: 2em;">
<summary style="cursor: pointer; font-weight: bold;">Raw JSON response</summary>
<pre id="raw-json" style="margin-top: 1em; padding: 1em; background-color: #f5f5f5; border: 1px solid #ddd; border-radius: 3px; overflow-x: auto;"></pre>
</details>
</div>

<script>
const form = document.getElementById('rules-form');
const resultsContainer = document.getElementById('results-container');
const resultsContent = document.getElementById('results-content');
const resultsCount = document.getElementById('results-count');
const pagination = document.getElementById('pagination');
const submitBtn = document.getElementById('submit-btn');

// Populate form on initial load
(function() {
const params = populateFormFromURL();
const action = params.get('action');
const page = params.get('page');
if (action) {
fetchResults(page ? parseInt(page) : 1);
}
})();

async function fetchResults(page = 1) {
submitBtn.disabled = true;
submitBtn.textContent = 'Loading...';

const formData = new FormData(form);
const params = new URLSearchParams();

for (const [key, value] of formData.entries()) {
if (value && key !== 'page_size') {
params.append(key, value);
}
}

const pageSize = document.getElementById('page_size').value || '50';
params.append('page', page.toString());
params.append('page_size', pageSize);

try {
const response = await fetch('{{ urls.path("-/rules.json") }}?' + params.toString(), {
method: 'GET',
headers: {
'Accept': 'application/json',
}
});

const data = await response.json();

if (response.ok) {
displayResults(data);
} else {
displayError(data);
}
} catch (error) {
displayError({ error: error.message });
} finally {
submitBtn.disabled = false;
submitBtn.textContent = 'View Permission Rules';
}
}

function displayResults(data) {
resultsContainer.style.display = 'block';

// Update count
resultsCount.textContent = `Showing ${data.items.length} of ${data.total} total rules (page ${data.page})`;

// Display results table
if (data.items.length === 0) {
resultsContent.innerHTML = '<div class="no-results">No permission rules found for this action.</div>';
} else {
let html = '<table class="results-table">';
html += '<thead><tr>';
html += '<th>Effect</th>';
html += '<th>Resource Path</th>';
html += '<th>Parent</th>';
html += '<th>Child</th>';
html += '<th>Source Plugin</th>';
html += '<th>Reason</th>';
html += '</tr></thead>';
html += '<tbody>';

for (const item of data.items) {
const rowClass = item.allow ? 'allow-row' : 'deny-row';
const effectBadge = item.allow
? '<span style="background: #4caf50; color: white; padding: 0.2em 0.5em; border-radius: 3px; font-weight: bold;">ALLOW</span>'
: '<span style="background: #f44336; color: white; padding: 0.2em 0.5em; border-radius: 3px; font-weight: bold;">DENY</span>';

html += `<tr class="${rowClass}">`;
html += `<td>${effectBadge}</td>`;
html += `<td><span class="resource-path">${escapeHtml(item.resource || '/')}</span></td>`;
html += `<td>${escapeHtml(item.parent || '—')}</td>`;
html += `<td>${escapeHtml(item.child || '—')}</td>`;
html += `<td>${escapeHtml(item.source_plugin || '—')}</td>`;
html += `<td>${escapeHtml(item.reason || '—')}</td>`;
html += '</tr>';
}

html += '</tbody></table>';
resultsContent.innerHTML = html;
}

// Update pagination
pagination.innerHTML = '';
if (data.previous_url || data.next_url) {
if (data.previous_url) {
const prevLink = document.createElement('a');
prevLink.href = data.previous_url;
prevLink.textContent = '← Previous';
pagination.appendChild(prevLink);
}

const pageInfo = document.createElement('span');
pageInfo.textContent = `Page ${data.page}`;
pagination.appendChild(pageInfo);

if (data.next_url) {
const nextLink = document.createElement('a');
nextLink.href = data.next_url;
nextLink.textContent = 'Next →';
pagination.appendChild(nextLink);
}
}

// Update raw JSON
document.getElementById('raw-json').innerHTML = jsonFormatHighlight(data);
}

function displayError(data) {
resultsContainer.style.display = 'block';
resultsCount.textContent = '';
pagination.innerHTML = '';

resultsContent.innerHTML = `<div class="error-message">Error: ${escapeHtml(data.error || 'Unknown error')}</div>`;

document.getElementById('raw-json').innerHTML = jsonFormatHighlight(data);
}

</script>

{% endblock %}
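
The pagination fields used by the script above (next_url, previous_url, page) also make it straightforward to walk every rule from a script. A minimal sketch, assuming next_url is returned as an absolute URL and using a placeholder action name and base URL:

import json
import urllib.parse
import urllib.request

url = "http://127.0.0.1:8001/-/rules.json?" + urllib.parse.urlencode(
    {"action": "view-table", "page_size": 50}
)
while url:
    with urllib.request.urlopen(url) as resp:
        data = json.load(resp)
    for item in data["items"]:
        # Each rule carries an allow flag plus resource/source_plugin/reason details
        print("ALLOW" if item["allow"] else "DENY", item.get("resource"), item.get("source_plugin"))
    url = data.get("next_url")  # None once the final page has been fetched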

@@ -2,26 +2,17 @@

{% block title %}{{ metadata.title or "Datasette" }}: {% for database in databases %}{{ database.name }}{% if not loop.last %}, {% endif %}{% endfor %}{% endblock %}

{% block extra_head %}
{% if noindex %}<meta name="robots" content="noindex">{% endif %}
{% endblock %}

{% block body_class %}index{% endblock %}

{% block content %}
<h1>{{ metadata.title or "Datasette" }}{% if private %} 🔒{% endif %}</h1>

{% set action_links, action_title = homepage_actions, "Homepage actions" %}
{% include "_action_menu.html" %}

{{ top_homepage() }}

{% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %}

{% for database in databases %}
<h2 style="padding-left: 10px; border-left: 10px solid #{{ database.color }}"><a href="{{ urls.database(database.name) }}">{{ database.name }}</a>{% if database.private %} 🔒{% endif %}</h2>
<p>
{% if database.show_table_row_counts %}{{ "{:,}".format(database.table_rows_sum) }} rows in {% endif %}{{ database.tables_count }} table{% if database.tables_count != 1 %}s{% endif %}{% if database.hidden_tables_count %}, {% endif -%}
{% if database.show_table_row_counts %}{{ "{:,}".format(database.table_rows_sum) }} rows in {% endif %}{{ database.tables_count }} table{% if database.tables_count != 1 %}s{% endif %}{% if database.tables_count and database.hidden_tables_count %}, {% endif -%}
{% if database.hidden_tables_count -%}
{% if database.show_table_row_counts %}{{ "{:,}".format(database.hidden_table_rows_sum) }} rows in {% endif %}{{ database.hidden_tables_count }} hidden table{% if database.hidden_tables_count != 1 %}s{% endif -%}
{% endif -%}

@@ -8,7 +8,7 @@

<p>You are logged in as <strong>{{ display_actor(actor) }}</strong></p>

<form class="core" action="{{ urls.logout() }}" method="post">
<form action="{{ urls.logout() }}" method="post">
<div>
<input type="hidden" name="csrftoken" value="{{ csrftoken() }}">
<input type="submit" value="Log out">

@@ -8,7 +8,7 @@

<p>Set a message:</p>

<form class="core" action="{{ urls.path('-/messages') }}" method="post">
<form action="{{ urls.path('-/messages') }}" method="post">
<div>
<input type="text" name="message" style="width: 40%">
<div class="select-wrapper">

@@ -1,5 +1,5 @@
<!DOCTYPE html>
<html lang="en">
<html>
<head>
<title>Datasette: Pattern Portfolio</title>
<link rel="stylesheet" href="{{ base_url }}-/static/app.css?{{ app_css_hash }}">

@@ -9,7 +9,7 @@
</head>
<body>

<header class="hd"><nav>
<header><nav>
<p class="crumbs">
<a href="/">home</a>
</p>

@@ -26,7 +26,7 @@
<li><a href="/-/plugins">Installed plugins</a></li>
|
||||
<li><a href="/-/versions">Version info</a></li>
|
||||
</ul>
|
||||
<form class="nav-menu-logout" action="/-/logout" method="post">
|
||||
<form action="/-/logout" method="post">
|
||||
<button class="button-as-link">Log out</button>
|
||||
</form>
|
||||
</div>

@@ -45,7 +45,7 @@

<h2 class="pattern-heading">Header for /database/table/row and Messages</h2>
|
||||
|
||||
<header class="hd">
|
||||
<header>
|
||||
<nav>
|
||||
<p class="crumbs">
|
||||
<a href="/">home</a> /

@@ -96,24 +96,18 @@
<section class="content">
|
||||
<div class="page-header" style="border-color: #ff0000">
|
||||
<h1>fixtures</h1>
|
||||
</div>
|
||||
<div class="page-action-menu">
|
||||
<details class="actions-menu-links details-menu">
|
||||
<summary>
|
||||
<div class="icon-text">
|
||||
<svg class="icon" aria-labelledby="actions-menu-links-title" role="img" style="color: #fff" xmlns="http://www.w3.org/2000/svg" width="28" height="28" viewBox="0 0 28 28" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
|
||||
<title id="actions-menu-links-title">Database actions</title>
|
||||
<circle cx="12" cy="12" r="3"></circle>
|
||||
<path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z"></path>
|
||||
</svg>
|
||||
<span>Database actions</span>
|
||||
</div>
|
||||
</summary>
|
||||
<summary><svg aria-labelledby="actions-menu-links-title" role="img"
|
||||
style="color: #666" xmlns="http://www.w3.org/2000/svg"
|
||||
width="28" height="28" viewBox="0 0 24 24" fill="none"
|
||||
stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
|
||||
<title id="actions-menu-links-title">Table actions</title>
|
||||
<circle cx="12" cy="12" r="3"></circle>
|
||||
<path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z"></path>
|
||||
</svg></summary>
|
||||
<div class="dropdown-menu">
|
||||
<div class="hook"></div>
|
||||
<ul>
|
||||
<li><a href="#">Action one</a></li>
|
||||
<li><a href="#">Action two</a></li>
|
||||
<li><a href="#">Database action</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
</details>

@@ -164,24 +158,18 @@
<section class="content">
|
||||
<div class="page-header" style="border-color: #ff0000">
|
||||
<h1>roadside_attraction_characteristics</h1>
|
||||
</div>
|
||||
<div class="page-action-menu">
|
||||
<details class="actions-menu-links details-menu">
|
||||
<summary>
|
||||
<div class="icon-text">
|
||||
<svg class="icon" aria-labelledby="actions-menu-links-title" role="img" style="color: #fff" xmlns="http://www.w3.org/2000/svg" width="28" height="28" viewBox="0 0 28 28" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
|
||||
<title id="actions-menu-links-title">Database actions</title>
|
||||
<circle cx="12" cy="12" r="3"></circle>
|
||||
<path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z"></path>
|
||||
</svg>
|
||||
<span>Table actions</span>
|
||||
</div>
|
||||
</summary>
|
||||
<summary><svg aria-labelledby="actions-menu-links-title" role="img"
|
||||
style="color: #666" xmlns="http://www.w3.org/2000/svg"
|
||||
width="28" height="28" viewBox="0 0 24 24" fill="none"
|
||||
stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
|
||||
<title id="actions-menu-links-title">Table actions</title>
|
||||
<circle cx="12" cy="12" r="3"></circle>
|
||||
<path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z"></path>
|
||||
</svg></summary>
|
||||
<div class="dropdown-menu">
|
||||
<div class="hook"></div>
|
||||
<ul>
|
||||
<li><a href="#">Action one</a></li>
|
||||
<li><a href="#">Action two</a></li>
|
||||
<li><a href="#">Table action</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
</details>

139
datasette/templates/permissions_debug.html
Normal file
@@ -0,0 +1,139 @@
{% extends "base.html" %}

{% block title %}Debug permissions{% endblock %}

{% block extra_head %}
<style type="text/css">
.check-result-true {
color: green;
}
.check-result-false {
color: red;
}
.check-result-no-opinion {
color: #aaa;
}
.check h2 {
font-size: 1em
}
.check-action, .check-when, .check-result {
font-size: 1.3em;
}
textarea {
height: 10em;
width: 95%;
box-sizing: border-box;
padding: 0.5em;
border: 2px dotted black;
}
.two-col {
display: inline-block;
width: 48%;
}
.two-col label {
width: 48%;
}
@media only screen and (max-width: 576px) {
.two-col {
width: 100%;
}
}
</style>
{% endblock %}

{% block content %}

<h1>Permission check testing tool</h1>

<p>This tool lets you simulate an actor and a permission check for that actor.</p>

<form action="{{ urls.path('-/permissions') }}" id="debug-post" method="post" style="margin-bottom: 1em">
<input type="hidden" name="csrftoken" value="{{ csrftoken() }}">
<div class="two-col">
<p><label>Actor</label></p>
<textarea name="actor">{% if actor_input %}{{ actor_input }}{% else %}{"id": "root"}{% endif %}</textarea>
</div>
<div class="two-col" style="vertical-align: top">
<p><label for="permission" style="display:block">Permission</label>
<select name="permission" id="permission">
{% for permission in permissions %}
<option value="{{ permission.0 }}">{{ permission.name }} (default {{ permission.default }})</option>
{% endfor %}
</select>
<p><label for="resource_1">Database name</label><input type="text" id="resource_1" name="resource_1"></p>
<p><label for="resource_2">Table or query name</label><input type="text" id="resource_2" name="resource_2"></p>
</div>
<div style="margin-top: 1em;">
<input type="submit" value="Simulate permission check">
</div>
<pre style="margin-top: 1em" id="debugResult"></pre>
</form>

<script>
var rawPerms = {{ permissions|tojson }};
var permissions = Object.fromEntries(rawPerms.map(([label, abbr, needs_resource_1, needs_resource_2, def]) => [label, {needs_resource_1, needs_resource_2, def}]))
var permissionSelect = document.getElementById('permission');
var resource1 = document.getElementById('resource_1');
var resource2 = document.getElementById('resource_2');
function updateResourceVisibility() {
var permission = permissionSelect.value;
var {needs_resource_1, needs_resource_2} = permissions[permission];
if (needs_resource_1) {
resource1.closest('p').style.display = 'block';
} else {
resource1.closest('p').style.display = 'none';
}
if (needs_resource_2) {
resource2.closest('p').style.display = 'block';
} else {
resource2.closest('p').style.display = 'none';
}
}
permissionSelect.addEventListener('change', updateResourceVisibility);
updateResourceVisibility();

// When #debug-post form is submitted, use fetch() to POST data
var debugPost = document.getElementById('debug-post');
var debugResult = document.getElementById('debugResult');
debugPost.addEventListener('submit', function(ev) {
ev.preventDefault();
var formData = new FormData(debugPost);
console.log(formData);
fetch(debugPost.action, {
method: 'POST',
body: new URLSearchParams(formData),
}).then(function(response) {
return response.json();
}).then(function(data) {
debugResult.innerText = JSON.stringify(data, null, 4);
});
});
</script>

<h1>Recent permissions checks</h1>

{% for check in permission_checks %}
<div class="check">
<h2>
<span class="check-action">{{ check.action }}</span>
checked at
<span class="check-when">{{ check.when }}</span>
{% if check.result %}
<span class="check-result check-result-true">✓</span>
{% elif check.result is none %}
<span class="check-result check-result-no-opinion">none</span>
{% else %}
<span class="check-result check-result-false">✗</span>
{% endif %}
{% if check.used_default %}
<span class="check-used-default">(used default)</span>
{% endif %}
</h2>
<p><strong>Actor:</strong> {{ check.actor|tojson }}</p>
{% if check.resource %}
<p><strong>Resource:</strong> {{ check.resource }}</p>
{% endif %}
</div>
{% endfor %}

{% endblock %}

@@ -24,19 +24,15 @@

{% block content %}

{% if canned_query_write and db_is_immutable %}
{% if canned_write and db_is_immutable %}
<p class="message-error">This query cannot be executed because the database is immutable.</p>
{% endif %}

<h1 style="padding-left: 10px; border-left: 10px solid #{{ database_color }}">{{ metadata.title or database }}{% if canned_query and not metadata.title %}: {{ canned_query }}{% endif %}{% if private %} 🔒{% endif %}</h1>
{% set action_links, action_title = query_actions(), "Query actions" %}
{% include "_action_menu.html" %}

{% if canned_query %}{{ top_canned_query() }}{% else %}{{ top_query() }}{% endif %}
<h1 style="padding-left: 10px; border-left: 10px solid #{{ database_color(database) }}">{{ metadata.title or database }}{% if canned_query and not metadata.title %}: {{ canned_query }}{% endif %}{% if private %} 🔒{% endif %}</h1>

{% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %}

<form class="sql core" action="{{ urls.database(database) }}{% if canned_query %}/{{ canned_query }}{% endif %}" method="{% if canned_query_write %}post{% else %}get{% endif %}">
<form class="sql" action="{{ urls.database(database) }}{% if canned_query %}/{{ canned_query }}{% endif %}" method="{% if canned_write %}post{% else %}get{% endif %}">
<h3>Custom SQL query{% if display_rows %} returning {% if truncated %}more than {% endif %}{{ "{:,}".format(display_rows|length) }} row{% if display_rows|length == 1 %}{% else %}s{% endif %}{% endif %}{% if not query_error %}
<span class="show-hide-sql">(<a href="{{ show_hide_link }}">{{ show_hide_text }}</a>)</span>
{% endif %}</h3>

@@ -65,8 +61,8 @@
{% endif %}
<p>
{% if not hide_sql %}<button id="sql-format" type="button" hidden>Format SQL</button>{% endif %}
{% if canned_query_write %}<input type="hidden" name="csrftoken" value="{{ csrftoken() }}">{% endif %}
<input type="submit" value="Run SQL"{% if canned_query_write and db_is_immutable %} disabled{% endif %}>
{% if canned_write %}<input type="hidden" name="csrftoken" value="{{ csrftoken() }}">{% endif %}
<input type="submit" value="Run SQL"{% if canned_write and db_is_immutable %} disabled{% endif %}>
{{ show_hide_hidden }}
{% if canned_query and edit_sql_url %}<a href="{{ edit_sql_url }}" class="canned-query-edit-sql">Edit SQL</a>{% endif %}
</p>

@@ -91,7 +87,7 @@
</tbody>
</table></div>
{% else %}
{% if not canned_query_write and not error %}
{% if not canned_write and not error %}
<p class="zero-results">0 results</p>
{% endif %}
{% endif %}

@@ -20,12 +20,7 @@
{% endblock %}

{% block content %}
<h1 style="padding-left: 10px; border-left: 10px solid #{{ database_color }}">{{ table }}: {{ ', '.join(primary_key_values) }}{% if private %} 🔒{% endif %}</h1>

{% set action_links, action_title = row_actions, "Row actions" %}
{% include "_action_menu.html" %}

{{ top_row() }}
<h1 style="padding-left: 10px; border-left: 10px solid #{{ database_color(database) }}">{{ table }}: {{ ', '.join(primary_key_values) }}{% if private %} 🔒{% endif %}</h1>

{% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %}

@@ -1,41 +0,0 @@
{% extends "base.html" %}

{% block title %}{% if is_instance %}Schema for all databases{% elif table_name %}Schema for {{ schemas[0].database }}.{{ table_name }}{% else %}Schema for {{ schemas[0].database }}{% endif %}{% endblock %}

{% block body_class %}schema{% endblock %}

{% block crumbs %}
{% if is_instance %}
{{ crumbs.nav(request=request) }}
{% elif table_name %}
{{ crumbs.nav(request=request, database=schemas[0].database, table=table_name) }}
{% else %}
{{ crumbs.nav(request=request, database=schemas[0].database) }}
{% endif %}
{% endblock %}

{% block content %}
<div class="page-header">
<h1>{% if is_instance %}Schema for all databases{% elif table_name %}Schema for {{ table_name }}{% else %}Schema for {{ schemas[0].database }}{% endif %}</h1>
</div>

{% for item in schemas %}
{% if is_instance %}
<h2>{{ item.database }}</h2>
{% endif %}

{% if item.schema %}
<pre style="background-color: #f5f5f5; padding: 1em; overflow-x: auto; border: 1px solid #ddd; border-radius: 4px;"><code>{{ item.schema }}</code></pre>
{% else %}
<p><em>No schema available for this database.</em></p>
{% endif %}

{% if not loop.last %}
<hr style="margin: 2em 0;">
{% endif %}
{% endfor %}

{% if not schemas %}
<p><em>No databases with viewable schemas found.</em></p>
{% endif %}
{% endblock %}

@@ -1,6 +1,6 @@
{% extends "base.html" %}

{% block title %}{{ database }}: {{ table }}: {% if count or count == 0 %}{{ "{:,}".format(count) }} row{% if count == 1 %}{% else %}s{% endif %}{% endif %}{% if human_description_en %} {{ human_description_en }}{% endif %}{% endblock %}
{% block title %}{{ database }}: {{ table }}: {% if filtered_table_rows_count or filtered_table_rows_count == 0 %}{{ "{:,}".format(filtered_table_rows_count) }} row{% if filtered_table_rows_count == 1 %}{% else %}s{% endif %}{% endif %}{% if human_description_en %} {{ human_description_en }}{% endif %}{% endblock %}

{% block extra_head %}
{{- super() -}}

@@ -17,21 +17,37 @@
{% block body_class %}table db-{{ database|to_css_class }} table-{{ table|to_css_class }}{% endblock %}

{% block crumbs %}
{{ crumbs.nav(request=request, database=database, table=table) }}
{{ crumbs.nav(request=request, database=database) }}
{% endblock %}

{% block content %}
<div class="page-header" style="border-color: #{{ database_color }}">
<h1>{{ metadata.get("title") or table }}{% if is_view %} (view){% endif %}{% if private %} 🔒{% endif %}</h1>
<div class="page-header" style="border-color: #{{ database_color(database) }}">
<h1>{{ metadata.title or table }}{% if is_view %} (view){% endif %}{% if private %} 🔒{% endif %}</h1>
{% set links = table_actions() %}{% if links %}
<details class="actions-menu-links details-menu">
<summary><svg aria-labelledby="actions-menu-links-title" role="img"
style="color: #666" xmlns="http://www.w3.org/2000/svg"
width="28" height="28" viewBox="0 0 24 24" fill="none"
stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
<title id="actions-menu-links-title">Table actions</title>
<circle cx="12" cy="12" r="3"></circle>
<path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z"></path>
</svg></summary>
<div class="dropdown-menu">
{% if links %}
<ul>
{% for link in links %}
<li><a href="{{ link.href }}">{{ link.label }}</a></li>
{% endfor %}
</ul>
{% endif %}
</div>
</details>{% endif %}
</div>
{% set action_links, action_title = actions(), "View actions" if is_view else "Table actions" %}
{% include "_action_menu.html" %}

{{ top_table() }}

{% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %}

{% if metadata.get("columns") %}
{% if metadata.columns %}
<dl class="column-descriptions">
{% for column_name, column_description in metadata.columns.items() %}
<dt>{{ column_name }}</dt><dd>{{ column_description }}</dd>

@@ -39,16 +55,13 @@
</dl>
{% endif %}

{% if count or human_description_en %}
<h3>
{% if count == count_limit + 1 %}>{{ "{:,}".format(count_limit) }} rows
{% if allow_execute_sql and query.sql %} <a class="count-sql" style="font-size: 0.8em;" href="{{ urls.database_query(database, count_sql) }}">count all</a>{% endif %}
{% elif count or count == 0 %}{{ "{:,}".format(count) }} row{% if count == 1 %}{% else %}s{% endif %}{% endif %}
{% if filtered_table_rows_count or human_description_en %}
<h3>{% if filtered_table_rows_count or filtered_table_rows_count == 0 %}{{ "{:,}".format(filtered_table_rows_count) }} row{% if filtered_table_rows_count == 1 %}{% else %}s{% endif %}{% endif %}
{% if human_description_en %}{{ human_description_en }}{% endif %}
</h3>
{% endif %}

<form class="core" class="filters" action="{{ urls.table(database, table) }}" method="get">
<form class="filters" action="{{ urls.table(database, table) }}" method="get">
{% if supports_search %}
<div class="search-row"><label for="_search">Search:</label><input id="_search" type="search" name="_search" value="{{ search }}"></div>
{% endif %}

@@ -81,7 +94,7 @@
</div><div class="select-wrapper filter-op">
|
||||
<select name="_filter_op">
|
||||
{% for key, display, no_argument in filters.lookups() %}
|
||||
<option value="{{ key }}{% if no_argument %}__1{% endif %}">{{ display }}</option>
|
||||
<option value="{{ key }}{% if no_argument %}__1{% endif %}"{% if key == lookup %} selected{% endif %}>{{ display }}</option>
|
||||
{% endfor %}
|
||||
</select>
|
||||
</div><input type="text" name="_filter_value" class="filter-value">

@@ -152,7 +165,7 @@
<a href="{{ append_querystring(renderers['json'], '_shape=object') }}">object</a>
|
||||
{% endif %}
|
||||
</p>
|
||||
<form class="core" action="{{ url_csv_path }}" method="get">
|
||||
<form action="{{ url_csv_path }}" method="get">
|
||||
<p>
|
||||
CSV options:
|
||||
<label><input type="checkbox" name="_dl"> download file</label>

@@ -175,41 +188,4 @@
<pre class="wrapped-sql">{{ view_definition }}</pre>
|
||||
{% endif %}
|
||||
|
||||
{% if allow_execute_sql and query.sql %}
|
||||
<script>
|
||||
document.addEventListener('DOMContentLoaded', function() {
|
||||
const countLink = document.querySelector('a.count-sql');
|
||||
if (countLink) {
|
||||
countLink.addEventListener('click', async function(ev) {
|
||||
ev.preventDefault();
|
||||
// Replace countLink with span with same style attribute
|
||||
const span = document.createElement('span');
|
||||
span.textContent = 'counting...';
|
||||
span.setAttribute('style', countLink.getAttribute('style'));
|
||||
countLink.replaceWith(span);
|
||||
countLink.setAttribute('disabled', 'disabled');
|
||||
let url = countLink.href.replace(/(\?|$)/, '.json$1');
|
||||
try {
|
||||
const response = await fetch(url);
|
||||
console.log({response});
|
||||
const data = await response.json();
|
||||
console.log({data});
|
||||
if (!response.ok) {
|
||||
console.log('throw error');
|
||||
throw new Error(data.title || data.error);
|
||||
}
|
||||
const count = data['rows'][0]['count(*)'];
|
||||
const formattedCount = count.toLocaleString();
|
||||
span.closest('h3').textContent = formattedCount + ' rows';
|
||||
} catch (error) {
|
||||
console.log('Update', span, 'with error message', error);
|
||||
span.textContent = error.message;
|
||||
span.style.color = 'red';
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
</script>
|
||||
{% endif %}
|
||||
|
||||
{% endblock %}

@@ -32,7 +32,7 @@ def trace_child_tasks():


@contextmanager
def trace(trace_type, **kwargs):
def trace(type, **kwargs):
    assert not TRACE_RESERVED_KEYS.intersection(
        kwargs.keys()
    ), f".trace() keyword parameters cannot include {TRACE_RESERVED_KEYS}"

@@ -45,24 +45,17 @@ def trace(trace_type, **kwargs):
        yield kwargs
        return
    start = time.perf_counter()
    captured_error = None
    try:
        yield kwargs
    except Exception as ex:
        captured_error = ex
        raise
    finally:
        end = time.perf_counter()
        trace_info = {
            "type": trace_type,
            "start": start,
            "end": end,
            "duration_ms": (end - start) * 1000,
            "traceback": traceback.format_list(traceback.extract_stack(limit=6)[:-3]),
            "error": str(captured_error) if captured_error else None,
        }
        trace_info.update(kwargs)
        tracer.append(trace_info)
    yield kwargs
    end = time.perf_counter()
    trace_info = {
        "type": type,
        "start": start,
        "end": end,
        "duration_ms": (end - start) * 1000,
        "traceback": traceback.format_list(traceback.extract_stack(limit=6)[:-3]),
    }
    trace_info.update(kwargs)
    tracer.append(trace_info)


@contextmanager
|
||||
|
|
@ -97,7 +90,6 @@ class AsgiTracer:
|
|||
|
||||
async def wrapped_send(message):
|
||||
nonlocal accumulated_body, size_limit_exceeded, response_headers
|
||||
|
||||
if message["type"] == "http.response.start":
|
||||
response_headers = message["headers"]
|
||||
await send(message)
|
||||
|
|
@ -110,12 +102,11 @@ class AsgiTracer:
|
|||
# Accumulate body until the end or until size is exceeded
|
||||
accumulated_body += message["body"]
|
||||
if len(accumulated_body) > self.max_body_bytes:
|
||||
# Send what we have accumulated so far
|
||||
await send(
|
||||
{
|
||||
"type": "http.response.body",
|
||||
"body": accumulated_body,
|
||||
"more_body": bool(message.get("more_body")),
|
||||
"more_body": True,
|
||||
}
|
||||
)
|
||||
size_limit_exceeded = True
|
||||
|
|
|
|||
|
|
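A minimal usage sketch of the trace() context manager in its new form (illustrative, not part of the diff; assumes datasette.tracer is importable and tracing is enabled for the request):

    from datasette.tracer import trace

    with trace("sql", database="fixtures", sql="select 1"):
        pass  # on exit the finally block records start/end, duration_ms,
              # a trimmed traceback and any captured error on the tracer
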
@@ -1,4 +1,4 @@
from .utils import tilde_encode, path_with_format, PrefixedUrlString
from .utils import tilde_encode, path_with_format, HASH_LENGTH, PrefixedUrlString
import urllib


@@ -31,12 +31,6 @@ class Urls:
        db = self.ds.get_database(database)
        return self.path(tilde_encode(db.route), format=format)

    def database_query(self, database, sql, format=None):
        path = f"{self.database(database)}/-/query?" + urllib.parse.urlencode(
            {"sql": sql}
        )
        return self.path(path, format=format)

    def table(self, database, table, format=None):
        path = f"{self.database(database)}/{tilde_encode(table)}"
        if format is not None:

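For reference, a sketch of how the database_query() method added here compares to the existing table() helper (return values are illustrative):

    datasette.urls.table("fixtures", "facetable")
    # -> "/fixtures/facetable"
    datasette.urls.database_query("fixtures", "select 1")
    # -> "/fixtures/-/query?sql=select+1"
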
@@ -1,10 +1,7 @@
import asyncio
from contextlib import contextmanager
import aiofiles
import click
from collections import OrderedDict, namedtuple, Counter
import copy
import dataclasses
import base64
import hashlib
import inspect

@@ -20,66 +17,11 @@ import time
import types
import secrets
import shutil
from typing import Iterable, List, Tuple
import urllib
import yaml
from .shutil_backport import copytree
from .sqlite import sqlite3, supports_table_xinfo

if typing.TYPE_CHECKING:
    from datasette.database import Database
    from datasette.permissions import Resource


@dataclasses.dataclass
class PaginatedResources:
    """Paginated results from allowed_resources query."""

    resources: List["Resource"]
    next: str | None  # Keyset token for next page (None if no more results)
    _datasette: typing.Any = dataclasses.field(default=None, repr=False)
    _action: str = dataclasses.field(default=None, repr=False)
    _actor: typing.Any = dataclasses.field(default=None, repr=False)
    _parent: str | None = dataclasses.field(default=None, repr=False)
    _include_is_private: bool = dataclasses.field(default=False, repr=False)
    _include_reasons: bool = dataclasses.field(default=False, repr=False)
    _limit: int = dataclasses.field(default=100, repr=False)

    async def all(self):
        """
        Async generator that yields all resources across all pages.

        Automatically handles pagination under the hood. This is useful when you need
        to iterate through all results without manually managing pagination tokens.

        Yields:
            Resource objects one at a time

        Example:
            page = await datasette.allowed_resources("view-table", actor)
            async for table in page.all():
                print(f"{table.parent}/{table.child}")
        """
        # Yield all resources from current page
        for resource in self.resources:
            yield resource

        # Continue fetching subsequent pages if there are more
        next_token = self.next
        while next_token:
            page = await self._datasette.allowed_resources(
                self._action,
                self._actor,
                parent=self._parent,
                include_is_private=self._include_is_private,
                include_reasons=self._include_reasons,
                limit=self._limit,
                next=next_token,
            )
            for resource in page.resources:
                yield resource
            next_token = page.next

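A hedged sketch of driving the keyset pagination manually, as an alternative to PaginatedResources.all() (assumes allowed_resources() accepts the same keyword arguments that all() forwards):

    page = await datasette.allowed_resources("view-table", actor, limit=50)
    while True:
        for resource in page.resources:
            print(resource.parent, resource.child)
        if not page.next:
            break
        page = await datasette.allowed_resources(
            "view-table", actor, limit=50, next=page.next
        )
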
# From https://www.sqlite.org/lang_keywords.html
reserved_words = set(

@@ -300,7 +242,6 @@ allowed_pragmas = (
    "schema_version",
    "table_info",
    "table_xinfo",
    "table_list",
)
disallawed_sql_res = [
    (

@@ -461,9 +402,9 @@ def make_dockerfile(
        apt_get_extras = apt_get_extras_
    if spatialite:
        apt_get_extras.extend(["python3-dev", "gcc", "libsqlite3-mod-spatialite"])
        environment_variables["SQLITE_EXTENSIONS"] = (
            "/usr/lib/x86_64-linux-gnu/mod_spatialite.so"
        )
        environment_variables[
            "SQLITE_EXTENSIONS"
        ] = "/usr/lib/x86_64-linux-gnu/mod_spatialite.so"
    return """
FROM python:3.11.0-slim-bullseye
COPY . /app

@@ -475,11 +416,9 @@ RUN datasette inspect {files} --inspect-file inspect-data.json
ENV PORT {port}
EXPOSE {port}
CMD {cmd}""".format(
        apt_get_extras=(
            APT_GET_DOCKERFILE_EXTRAS.format(" ".join(apt_get_extras))
            if apt_get_extras
            else ""
        ),
        apt_get_extras=APT_GET_DOCKERFILE_EXTRAS.format(" ".join(apt_get_extras))
        if apt_get_extras
        else "",
        environment_variables="\n".join(
            [
                "ENV {} '{}'".format(key, value)

@@ -770,7 +709,7 @@ def to_css_class(s):
    """
    if css_class_re.match(s):
        return s
    md5_suffix = md5_not_usedforsecurity(s)[:6]
    md5_suffix = hashlib.md5(s.encode("utf8")).hexdigest()[:6]
    # Strip leading _, -
    s = s.lstrip("_").lstrip("-")
    # Replace any whitespace with hyphens

@@ -889,18 +828,9 @@ _infinities = {float("inf"), float("-inf")}


def remove_infinites(row):
    to_check = row
    if isinstance(row, dict):
        to_check = row.values()
    if not any((c in _infinities) if isinstance(c, float) else 0 for c in to_check):
        return row
    if isinstance(row, dict):
        return {
            k: (None if (isinstance(v, float) and v in _infinities) else v)
            for k, v in row.items()
        }
    else:
    if any((c in _infinities) if isinstance(c, float) else 0 for c in row):
        return [None if (isinstance(c, float) and c in _infinities) else c for c in row]
    return row

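Expected behaviour of the rewritten remove_infinites(), following the fast-path logic above (illustrative values):

    remove_infinites([1.0, float("inf"), "x"])  # -> [1.0, None, "x"]
    remove_infinites({"a": float("-inf")})      # -> {"a": None}
    remove_infinites([1, 2, 3])                 # returned unchanged, no copy made
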
class StaticMount(click.ParamType):

@@ -1107,8 +1037,7 @@ def resolve_env_secrets(config, environ):
        if list(config.keys()) == ["$env"]:
            return environ.get(list(config.values())[0])
        elif list(config.keys()) == ["$file"]:
            with open(list(config.values())[0]) as fp:
                return fp.read()
            return open(list(config.values())[0]).read()
        else:
            return {
                key: resolve_env_secrets(value, environ)

@@ -1185,34 +1114,17 @@ class StartupError(Exception):
    pass


_single_line_comment_re = re.compile(r"--.*")
_multi_line_comment_re = re.compile(r"/\*.*?\*/", re.DOTALL)
_single_quote_re = re.compile(r"'(?:''|[^'])*'")
_double_quote_re = re.compile(r'"(?:\"\"|[^"])*"')
_named_param_re = re.compile(r":(\w+)")
_re_named_parameter = re.compile(":([a-zA-Z0-9_]+)")


@documented
def named_parameters(sql: str) -> List[str]:
    """
    Given a SQL statement, return a list of named parameters that are used in the statement

    e.g. for ``select * from foo where id=:id`` this would return ``["id"]``
    """
    sql = _single_line_comment_re.sub("", sql)
    sql = _multi_line_comment_re.sub("", sql)
    sql = _single_quote_re.sub("", sql)
    sql = _double_quote_re.sub("", sql)
    # Extract parameters from what is left
    return _named_param_re.findall(sql)


async def derive_named_parameters(db: "Database", sql: str) -> List[str]:
    """
    This undocumented but stable method exists for backwards compatibility
    with plugins that were using it before it switched to named_parameters()
    """
    return named_parameters(sql)
async def derive_named_parameters(db, sql):
    explain = "explain {}".format(sql.strip().rstrip(";"))
    possible_params = _re_named_parameter.findall(sql)
    try:
        results = await db.execute(explain, {p: None for p in possible_params})
        return [row["p4"].lstrip(":") for row in results if row["opcode"] == "Variable"]
    except sqlite3.DatabaseError:
        return possible_params


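The comment- and string-stripping behaviour of the new named_parameters() in practice (illustrative inputs):

    named_parameters("select * from t where id = :id -- :not_this")
    # -> ["id"]  (single-line comment stripped before extraction)
    named_parameters("select ':literal', :name from t")
    # -> ["name"]  (string literals stripped before extraction)
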
def add_cors_headers(headers):

@@ -1220,7 +1132,6 @@ def add_cors_headers(headers):
    headers["Access-Control-Allow-Headers"] = "Authorization, Content-Type"
    headers["Access-Control-Expose-Headers"] = "Link"
    headers["Access-Control-Allow-Methods"] = "GET, POST, HEAD, OPTIONS"
    headers["Access-Control-Max-Age"] = "3600"


_TILDE_ENCODING_SAFE = frozenset(

@@ -1298,218 +1209,3 @@ async def row_sql_params_pks(db, table, pk_values):
    for i, pk_value in enumerate(pk_values):
        params[f"p{i}"] = pk_value
    return sql, params, pks


def _handle_pair(key: str, value: str) -> dict:
    """
    Turn a key-value pair into a nested dictionary.
    foo, bar => {'foo': 'bar'}
    foo.bar, baz => {'foo': {'bar': 'baz'}}
    foo.bar, [1, 2, 3] => {'foo': {'bar': [1, 2, 3]}}
    foo.bar, "baz" => {'foo': {'bar': 'baz'}}
    foo.bar, '{"baz": "qux"}' => {'foo': {'bar': "{'baz': 'qux'}"}}
    """
    try:
        value = json.loads(value)
    except json.JSONDecodeError:
        # If it doesn't parse as JSON, treat it as a string
        pass

    keys = key.split(".")
    result = current_dict = {}

    for k in keys[:-1]:
        current_dict[k] = {}
        current_dict = current_dict[k]

    current_dict[keys[-1]] = value
    return result


def _combine(base: dict, update: dict) -> dict:
    """
    Recursively merge two dictionaries.
    """
    for key, value in update.items():
        if isinstance(value, dict) and key in base and isinstance(base[key], dict):
            base[key] = _combine(base[key], value)
        else:
            base[key] = value
    return base


def pairs_to_nested_config(pairs: typing.List[typing.Tuple[str, typing.Any]]) -> dict:
    """
    Parse a list of key-value pairs into a nested dictionary.
    """
    result = {}
    for key, value in pairs:
        parsed_pair = _handle_pair(key, value)
        result = _combine(result, parsed_pair)
    return result


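A sketch of pairs_to_nested_config() combining _handle_pair() and _combine() (the plugin name is hypothetical):

    pairs_to_nested_config([
        ("settings.sql_time_limit_ms", "1000"),
        ("plugins.datasette-example.key", '"value"'),
    ])
    # -> {"settings": {"sql_time_limit_ms": 1000},
    #     "plugins": {"datasette-example": {"key": "value"}}}
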
def make_slot_function(name, datasette, request, **kwargs):
    from datasette.plugins import pm

    method = getattr(pm.hook, name, None)
    assert method is not None, "No hook found for {}".format(name)

    async def inner():
        html_bits = []
        for hook in method(datasette=datasette, request=request, **kwargs):
            html = await await_me_maybe(hook)
            if html is not None:
                html_bits.append(html)
        return markupsafe.Markup("".join(html_bits))

    return inner


def prune_empty_dicts(d: dict):
    """
    Recursively prune all empty dictionaries from a given dictionary.
    """
    for key, value in list(d.items()):
        if isinstance(value, dict):
            prune_empty_dicts(value)
            if value == {}:
                d.pop(key, None)


def move_plugins_and_allow(source: dict, destination: dict) -> Tuple[dict, dict]:
    """
    Move 'plugins' and 'allow' keys from source to destination dictionary. Creates
    hierarchy in destination if needed. After moving, recursively remove any keys
    in the source that are left empty.
    """
    source = copy.deepcopy(source)
    destination = copy.deepcopy(destination)

    def recursive_move(src, dest, path=None):
        if path is None:
            path = []
        for key, value in list(src.items()):
            new_path = path + [key]
            if key in ("plugins", "allow"):
                # Navigate and create the hierarchy in destination if needed
                d = dest
                for step in path:
                    d = d.setdefault(step, {})
                # Move the plugins
                d[key] = value
                # Remove the plugins from source
                src.pop(key, None)
            elif isinstance(value, dict):
                recursive_move(value, dest, new_path)
                # After moving, check if the current dictionary is empty and remove it if so
                if not value:
                    src.pop(key, None)

    recursive_move(source, destination)
    prune_empty_dicts(source)
    return source, destination


_table_config_keys = (
    "hidden",
    "sort",
    "sort_desc",
    "size",
    "sortable_columns",
    "label_column",
    "facets",
    "fts_table",
    "fts_pk",
    "searchmode",
)


def move_table_config(metadata: dict, config: dict):
    """
    Move all known table configuration keys from metadata to config.
    """
    if "databases" not in metadata:
        return metadata, config
    metadata = copy.deepcopy(metadata)
    config = copy.deepcopy(config)
    for database_name, database in metadata["databases"].items():
        if "tables" not in database:
            continue
        for table_name, table in database["tables"].items():
            for key in _table_config_keys:
                if key in table:
                    config.setdefault("databases", {}).setdefault(
                        database_name, {}
                    ).setdefault("tables", {}).setdefault(table_name, {})[
                        key
                    ] = table.pop(
                        key
                    )
    prune_empty_dicts(metadata)
    return metadata, config


def redact_keys(original: dict, key_patterns: Iterable) -> dict:
    """
    Recursively redact sensitive keys in a dictionary based on given patterns

    :param original: The original dictionary
    :param key_patterns: A list of substring patterns to redact
    :return: A copy of the original dictionary with sensitive values redacted
    """

    def redact(data):
        if isinstance(data, dict):
            return {
                k: (
                    redact(v)
                    if not any(pattern in k for pattern in key_patterns)
                    else "***"
                )
                for k, v in data.items()
            }
        elif isinstance(data, list):
            return [redact(item) for item in data]
        else:
            return data

    return redact(original)


def md5_not_usedforsecurity(s):
    try:
        return hashlib.md5(s.encode("utf8"), usedforsecurity=False).hexdigest()
    except TypeError:
        # For Python 3.8 which does not support usedforsecurity=False
        return hashlib.md5(s.encode("utf8")).hexdigest()


_etag_cache = {}


async def calculate_etag(filepath, chunk_size=4096):
    if filepath in _etag_cache:
        return _etag_cache[filepath]

    hasher = hashlib.md5()
    async with aiofiles.open(filepath, "rb") as f:
        while True:
            chunk = await f.read(chunk_size)
            if not chunk:
                break
            hasher.update(chunk)

    etag = f'"{hasher.hexdigest()}"'
    _etag_cache[filepath] = etag

    return etag


def deep_dict_update(dict1, dict2):
    for key, value in dict2.items():
        if isinstance(value, dict):
            dict1[key] = deep_dict_update(dict1.get(key, type(value)()), value)
        else:
            dict1[key] = value
    return dict1

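deep_dict_update() merges nested dictionaries in place, for example (illustrative values):

    a = {"databases": {"fixtures": {"tables": {}}}}
    b = {"databases": {"fixtures": {"allow": {"id": "root"}}}}
    deep_dict_update(a, b)
    # -> {"databases": {"fixtures": {"tables": {}, "allow": {"id": "root"}}}}
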
@@ -1,587 +0,0 @@
"""
SQL query builder for hierarchical permission checking.

This module implements a cascading permission system based on the pattern
from https://github.com/simonw/research/tree/main/sqlite-permissions-poc

It builds SQL queries that:

1. Start with all resources of a given type (from resource_type.resources_sql())
2. Gather permission rules from plugins (via permission_resources_sql hook)
3. Apply cascading logic: child → parent → global
4. Apply DENY-beats-ALLOW at each level

The core pattern is:
- Resources are identified by (parent, child) tuples
- Rules are evaluated at three levels:
  - child: exact match on (parent, child)
  - parent: match on (parent, NULL)
  - global: match on (NULL, NULL)
- At the same level, DENY (allow=0) beats ALLOW (allow=1)
- Across levels, child beats parent beats global
"""

from typing import TYPE_CHECKING

from datasette.utils.permissions import gather_permission_sql_from_hooks

if TYPE_CHECKING:
    from datasette.app import Datasette


async def build_allowed_resources_sql(
    datasette: "Datasette",
    actor: dict | None,
    action: str,
    *,
    parent: str | None = None,
    include_is_private: bool = False,
) -> tuple[str, dict]:
    """
    Build a SQL query that returns all resources the actor can access for this action.

    Args:
        datasette: The Datasette instance
        actor: The actor dict (or None for unauthenticated)
        action: The action name (e.g., "view-table", "view-database")
        parent: Optional parent filter to limit results (e.g., database name)
        include_is_private: If True, add is_private column showing if anonymous cannot access

    Returns:
        A tuple of (sql_query, params_dict)

    The returned SQL query will have three columns (or four with include_is_private):
    - parent: The parent resource identifier (or NULL)
    - child: The child resource identifier (or NULL)
    - reason: The reason from the rule that granted access
    - is_private: (if include_is_private) 1 if anonymous cannot access, 0 otherwise

    Example:
        For action="view-table", this might return:
        SELECT parent, child, reason FROM ... WHERE is_allowed = 1

        Results would be like:
        ('analytics', 'users', 'role-based: analysts can access analytics DB')
        ('analytics', 'events', 'role-based: analysts can access analytics DB')
        ('production', 'orders', 'business-exception: allow production.orders for carol')
    """
    # Get the Action object
    action_obj = datasette.actions.get(action)
    if not action_obj:
        raise ValueError(f"Unknown action: {action}")

    # If this action also_requires another action, we need to combine the queries
    if action_obj.also_requires:
        # Build both queries
        main_sql, main_params = await _build_single_action_sql(
            datasette,
            actor,
            action,
            parent=parent,
            include_is_private=include_is_private,
        )
        required_sql, required_params = await _build_single_action_sql(
            datasette,
            actor,
            action_obj.also_requires,
            parent=parent,
            include_is_private=False,
        )

        # Merge parameters - they should have identical values for :actor, :actor_id, etc.
        all_params = {**main_params, **required_params}
        if parent is not None:
            all_params["filter_parent"] = parent

        # Combine with INNER JOIN - only resources allowed by both actions
        combined_sql = f"""
        WITH
        main_allowed AS (
            {main_sql}
        ),
        required_allowed AS (
            {required_sql}
        )
        SELECT m.parent, m.child, m.reason"""

        if include_is_private:
            combined_sql += ", m.is_private"

        combined_sql += """
        FROM main_allowed m
        INNER JOIN required_allowed r
            ON ((m.parent = r.parent) OR (m.parent IS NULL AND r.parent IS NULL))
            AND ((m.child = r.child) OR (m.child IS NULL AND r.child IS NULL))
        """

        if parent is not None:
            combined_sql += "WHERE m.parent = :filter_parent\n"

        combined_sql += "ORDER BY m.parent, m.child"

        return combined_sql, all_params

    # No also_requires, build single action query
    return await _build_single_action_sql(
        datasette, actor, action, parent=parent, include_is_private=include_is_private
    )


async def _build_single_action_sql(
    datasette: "Datasette",
    actor: dict | None,
    action: str,
    *,
    parent: str | None = None,
    include_is_private: bool = False,
) -> tuple[str, dict]:
    """
    Build SQL for a single action (internal helper for build_allowed_resources_sql).

    This contains the original logic from build_allowed_resources_sql, extracted
    to allow combining multiple actions when also_requires is used.
    """
    # Get the Action object
    action_obj = datasette.actions.get(action)
    if not action_obj:
        raise ValueError(f"Unknown action: {action}")

    # Get base resources SQL from the resource class
    base_resources_sql = await action_obj.resource_class.resources_sql(datasette)

    permission_sqls = await gather_permission_sql_from_hooks(
        datasette=datasette,
        actor=actor,
        action=action,
    )

    # If permission_sqls is the sentinel, skip all permission checks
    # Return SQL that allows all resources
    from datasette.utils.permissions import SKIP_PERMISSION_CHECKS

    if permission_sqls is SKIP_PERMISSION_CHECKS:
        cols = "parent, child, 'skip_permission_checks' AS reason"
        if include_is_private:
            cols += ", 0 AS is_private"
        return f"SELECT {cols} FROM ({base_resources_sql})", {}

    all_params = {}
    rule_sqls = []
    restriction_sqls = []

    for permission_sql in permission_sqls:
        # Always collect params (even from restriction-only plugins)
        all_params.update(permission_sql.params or {})

        # Collect restriction SQL filters
        if permission_sql.restriction_sql:
            restriction_sqls.append(permission_sql.restriction_sql)

        # Skip plugins that only provide restriction_sql (no permission rules)
        if permission_sql.sql is None:
            continue
        rule_sqls.append(
            f"""
SELECT parent, child, allow, reason, '{permission_sql.source}' AS source_plugin FROM (
    {permission_sql.sql}
)
""".strip()
        )

    # If no rules, return empty result (deny all)
    if not rule_sqls:
        empty_cols = "NULL AS parent, NULL AS child, NULL AS reason"
        if include_is_private:
            empty_cols += ", NULL AS is_private"
        return f"SELECT {empty_cols} WHERE 0", {}

    # Build the cascading permission query
    rules_union = " UNION ALL ".join(rule_sqls)

    # Build the main query
    query_parts = [
        "WITH",
        "base AS (",
        f" {base_resources_sql}",
        "),",
        "all_rules AS (",
        f" {rules_union}",
        "),",
    ]

    # If include_is_private, we need to build anonymous permissions too
    if include_is_private:
        anon_permission_sqls = await gather_permission_sql_from_hooks(
            datasette=datasette,
            actor=None,
            action=action,
        )
        anon_sqls_rewritten = []
        anon_params = {}

        for permission_sql in anon_permission_sqls:
            # Skip plugins that only provide restriction_sql (no permission rules)
            if permission_sql.sql is None:
                continue
            rewritten_sql = permission_sql.sql
            for key, value in (permission_sql.params or {}).items():
                anon_key = f"anon_{key}"
                anon_params[anon_key] = value
                rewritten_sql = rewritten_sql.replace(f":{key}", f":{anon_key}")
            anon_sqls_rewritten.append(rewritten_sql)

        all_params.update(anon_params)

        if anon_sqls_rewritten:
            anon_rules_union = " UNION ALL ".join(anon_sqls_rewritten)
            query_parts.extend(
                [
                    "anon_rules AS (",
                    f" {anon_rules_union}",
                    "),",
                ]
            )

    # Continue with the cascading logic
    query_parts.extend(
        [
            "child_lvl AS (",
            " SELECT b.parent, b.child,",
            " MAX(CASE WHEN ar.allow = 0 THEN 1 ELSE 0 END) AS any_deny,",
            " MAX(CASE WHEN ar.allow = 1 THEN 1 ELSE 0 END) AS any_allow,",
            " json_group_array(CASE WHEN ar.allow = 0 THEN ar.source_plugin || ': ' || ar.reason END) AS deny_reasons,",
            " json_group_array(CASE WHEN ar.allow = 1 THEN ar.source_plugin || ': ' || ar.reason END) AS allow_reasons",
            " FROM base b",
            " LEFT JOIN all_rules ar ON ar.parent = b.parent AND ar.child = b.child",
            " GROUP BY b.parent, b.child",
            "),",
            "parent_lvl AS (",
            " SELECT b.parent, b.child,",
            " MAX(CASE WHEN ar.allow = 0 THEN 1 ELSE 0 END) AS any_deny,",
            " MAX(CASE WHEN ar.allow = 1 THEN 1 ELSE 0 END) AS any_allow,",
            " json_group_array(CASE WHEN ar.allow = 0 THEN ar.source_plugin || ': ' || ar.reason END) AS deny_reasons,",
            " json_group_array(CASE WHEN ar.allow = 1 THEN ar.source_plugin || ': ' || ar.reason END) AS allow_reasons",
            " FROM base b",
            " LEFT JOIN all_rules ar ON ar.parent = b.parent AND ar.child IS NULL",
            " GROUP BY b.parent, b.child",
            "),",
            "global_lvl AS (",
            " SELECT b.parent, b.child,",
            " MAX(CASE WHEN ar.allow = 0 THEN 1 ELSE 0 END) AS any_deny,",
            " MAX(CASE WHEN ar.allow = 1 THEN 1 ELSE 0 END) AS any_allow,",
            " json_group_array(CASE WHEN ar.allow = 0 THEN ar.source_plugin || ': ' || ar.reason END) AS deny_reasons,",
            " json_group_array(CASE WHEN ar.allow = 1 THEN ar.source_plugin || ': ' || ar.reason END) AS allow_reasons",
            " FROM base b",
            " LEFT JOIN all_rules ar ON ar.parent IS NULL AND ar.child IS NULL",
            " GROUP BY b.parent, b.child",
            "),",
        ]
    )

    # Add anonymous decision logic if needed
    if include_is_private:
        query_parts.extend(
            [
                "anon_child_lvl AS (",
                " SELECT b.parent, b.child,",
                " MAX(CASE WHEN ar.allow = 0 THEN 1 ELSE 0 END) AS any_deny,",
                " MAX(CASE WHEN ar.allow = 1 THEN 1 ELSE 0 END) AS any_allow",
                " FROM base b",
                " LEFT JOIN anon_rules ar ON ar.parent = b.parent AND ar.child = b.child",
                " GROUP BY b.parent, b.child",
                "),",
                "anon_parent_lvl AS (",
                " SELECT b.parent, b.child,",
                " MAX(CASE WHEN ar.allow = 0 THEN 1 ELSE 0 END) AS any_deny,",
                " MAX(CASE WHEN ar.allow = 1 THEN 1 ELSE 0 END) AS any_allow",
                " FROM base b",
                " LEFT JOIN anon_rules ar ON ar.parent = b.parent AND ar.child IS NULL",
                " GROUP BY b.parent, b.child",
                "),",
                "anon_global_lvl AS (",
                " SELECT b.parent, b.child,",
                " MAX(CASE WHEN ar.allow = 0 THEN 1 ELSE 0 END) AS any_deny,",
                " MAX(CASE WHEN ar.allow = 1 THEN 1 ELSE 0 END) AS any_allow",
                " FROM base b",
                " LEFT JOIN anon_rules ar ON ar.parent IS NULL AND ar.child IS NULL",
                " GROUP BY b.parent, b.child",
                "),",
                "anon_decisions AS (",
                " SELECT",
                " b.parent, b.child,",
                " CASE",
                " WHEN acl.any_deny = 1 THEN 0",
                " WHEN acl.any_allow = 1 THEN 1",
                " WHEN apl.any_deny = 1 THEN 0",
                " WHEN apl.any_allow = 1 THEN 1",
                " WHEN agl.any_deny = 1 THEN 0",
                " WHEN agl.any_allow = 1 THEN 1",
                " ELSE 0",
                " END AS anon_is_allowed",
                " FROM base b",
                " JOIN anon_child_lvl acl ON b.parent = acl.parent AND (b.child = acl.child OR (b.child IS NULL AND acl.child IS NULL))",
                " JOIN anon_parent_lvl apl ON b.parent = apl.parent AND (b.child = apl.child OR (b.child IS NULL AND apl.child IS NULL))",
                " JOIN anon_global_lvl agl ON b.parent = agl.parent AND (b.child = agl.child OR (b.child IS NULL AND agl.child IS NULL))",
                "),",
            ]
        )

    # Final decisions
    query_parts.extend(
        [
            "decisions AS (",
            " SELECT",
            " b.parent, b.child,",
            " -- Cascading permission logic: child → parent → global, DENY beats ALLOW at each level",
            " -- Priority order:",
            " -- 1. Child-level deny (most specific, blocks access)",
            " -- 2. Child-level allow (most specific, grants access)",
            " -- 3. Parent-level deny (intermediate, blocks access)",
            " -- 4. Parent-level allow (intermediate, grants access)",
            " -- 5. Global-level deny (least specific, blocks access)",
            " -- 6. Global-level allow (least specific, grants access)",
            " -- 7. Default deny (no rules match)",
            " CASE",
            " WHEN cl.any_deny = 1 THEN 0",
            " WHEN cl.any_allow = 1 THEN 1",
            " WHEN pl.any_deny = 1 THEN 0",
            " WHEN pl.any_allow = 1 THEN 1",
            " WHEN gl.any_deny = 1 THEN 0",
            " WHEN gl.any_allow = 1 THEN 1",
            " ELSE 0",
            " END AS is_allowed,",
            " CASE",
            " WHEN cl.any_deny = 1 THEN cl.deny_reasons",
            " WHEN cl.any_allow = 1 THEN cl.allow_reasons",
            " WHEN pl.any_deny = 1 THEN pl.deny_reasons",
            " WHEN pl.any_allow = 1 THEN pl.allow_reasons",
            " WHEN gl.any_deny = 1 THEN gl.deny_reasons",
            " WHEN gl.any_allow = 1 THEN gl.allow_reasons",
            " ELSE '[]'",
            " END AS reason",
        ]
    )

    if include_is_private:
        query_parts.append(
            " , CASE WHEN ad.anon_is_allowed = 0 THEN 1 ELSE 0 END AS is_private"
        )

    query_parts.extend(
        [
            " FROM base b",
            " JOIN child_lvl cl ON b.parent = cl.parent AND (b.child = cl.child OR (b.child IS NULL AND cl.child IS NULL))",
            " JOIN parent_lvl pl ON b.parent = pl.parent AND (b.child = pl.child OR (b.child IS NULL AND pl.child IS NULL))",
            " JOIN global_lvl gl ON b.parent = gl.parent AND (b.child = gl.child OR (b.child IS NULL AND gl.child IS NULL))",
        ]
    )

    if include_is_private:
        query_parts.append(
            " JOIN anon_decisions ad ON b.parent = ad.parent AND (b.child = ad.child OR (b.child IS NULL AND ad.child IS NULL))"
        )

    query_parts.append(")")

    # Add restriction list CTE if there are restrictions
    if restriction_sqls:
        # Wrap each restriction_sql in a subquery to avoid operator precedence issues
        # with UNION ALL inside the restriction SQL statements
        restriction_intersect = "\nINTERSECT\n".join(
            f"SELECT * FROM ({sql})" for sql in restriction_sqls
        )
        query_parts.extend(
            [",", "restriction_list AS (", f" {restriction_intersect}", ")"]
        )

    # Final SELECT
    select_cols = "parent, child, reason"
    if include_is_private:
        select_cols += ", is_private"

    query_parts.append(f"SELECT {select_cols}")
    query_parts.append("FROM decisions")
    query_parts.append("WHERE is_allowed = 1")

    # Add restriction filter if there are restrictions
    if restriction_sqls:
        query_parts.append(
            """
    AND EXISTS (
        SELECT 1 FROM restriction_list r
        WHERE (r.parent = decisions.parent OR r.parent IS NULL)
        AND (r.child = decisions.child OR r.child IS NULL)
    )"""
        )

    # Add parent filter if specified
    if parent is not None:
        query_parts.append(" AND parent = :filter_parent")
        all_params["filter_parent"] = parent

    query_parts.append("ORDER BY parent, child")

    query = "\n".join(query_parts)
    return query, all_params


async def build_permission_rules_sql(
    datasette: "Datasette", actor: dict | None, action: str
) -> tuple[str, dict, list]:
    """
    Build the UNION SQL and params for all permission rules for a given actor and action.

    Returns:
        A tuple of (sql, params, restriction_sqls) where sql is a UNION ALL query
        that returns (parent, child, allow, reason, source_plugin) rows.
    """
    # Get the Action object
    action_obj = datasette.actions.get(action)
    if not action_obj:
        raise ValueError(f"Unknown action: {action}")

    permission_sqls = await gather_permission_sql_from_hooks(
        datasette=datasette,
        actor=actor,
        action=action,
    )

    # If permission_sqls is the sentinel, skip all permission checks
    # Return SQL that allows everything
    from datasette.utils.permissions import SKIP_PERMISSION_CHECKS

    if permission_sqls is SKIP_PERMISSION_CHECKS:
        return (
            "SELECT NULL AS parent, NULL AS child, 1 AS allow, 'skip_permission_checks' AS reason, 'skip' AS source_plugin",
            {},
            [],
        )

    if not permission_sqls:
        return (
            "SELECT NULL AS parent, NULL AS child, 0 AS allow, NULL AS reason, NULL AS source_plugin WHERE 0",
            {},
            [],
        )

    union_parts = []
    all_params = {}
    restriction_sqls = []

    for permission_sql in permission_sqls:
        all_params.update(permission_sql.params or {})

        # Collect restriction SQL filters
        if permission_sql.restriction_sql:
            restriction_sqls.append(permission_sql.restriction_sql)

        # Skip plugins that only provide restriction_sql (no permission rules)
        if permission_sql.sql is None:
            continue

        union_parts.append(
            f"""
SELECT parent, child, allow, reason, '{permission_sql.source}' AS source_plugin FROM (
    {permission_sql.sql}
)
""".strip()
        )

    rules_union = " UNION ALL ".join(union_parts)
    return rules_union, all_params, restriction_sqls


async def check_permission_for_resource(
    *,
    datasette: "Datasette",
    actor: dict | None,
    action: str,
    parent: str | None,
    child: str | None,
) -> bool:
    """
    Check if an actor has permission for a specific action on a specific resource.

    Args:
        datasette: The Datasette instance
        actor: The actor dict (or None)
        action: The action name
        parent: The parent resource identifier (e.g., database name, or None)
        child: The child resource identifier (e.g., table name, or None)

    Returns:
        True if the actor is allowed, False otherwise

    This builds the cascading permission query and checks if the specific
    resource is in the allowed set.
    """
    rules_union, all_params, restriction_sqls = await build_permission_rules_sql(
        datasette, actor, action
    )

    # If no rules (empty SQL), default deny
    if not rules_union:
        return False

    # Add parameters for the resource we're checking
    all_params["_check_parent"] = parent
    all_params["_check_child"] = child

    # If there are restriction filters, check if the resource passes them first
    if restriction_sqls:
        # Check if resource is in restriction allowlist
        # Database-level restrictions (parent, NULL) should match all children (parent, *)
        # Wrap each restriction_sql in a subquery to avoid operator precedence issues
        restriction_check = "\nINTERSECT\n".join(
            f"SELECT * FROM ({sql})" for sql in restriction_sqls
        )
        restriction_query = f"""
            WITH restriction_list AS (
                {restriction_check}
            )
            SELECT EXISTS (
                SELECT 1 FROM restriction_list
                WHERE (parent = :_check_parent OR parent IS NULL)
                AND (child = :_check_child OR child IS NULL)
            ) AS in_allowlist
        """
        result = await datasette.get_internal_database().execute(
            restriction_query, all_params
        )
        if result.rows and not result.rows[0][0]:
            # Resource not in restriction allowlist - deny
            return False

    query = f"""
        WITH
        all_rules AS (
            {rules_union}
        ),
        matched_rules AS (
            SELECT ar.*,
                CASE
                    WHEN ar.child IS NOT NULL THEN 2 -- child-level (most specific)
                    WHEN ar.parent IS NOT NULL THEN 1 -- parent-level
                    ELSE 0 -- root/global
                END AS depth
            FROM all_rules ar
            WHERE (ar.parent IS NULL OR ar.parent = :_check_parent)
            AND (ar.child IS NULL OR ar.child = :_check_child)
        ),
        winner AS (
            SELECT *
            FROM matched_rules
            ORDER BY
                depth DESC, -- specificity first (higher depth wins)
                CASE WHEN allow=0 THEN 0 ELSE 1 END, -- then deny over allow
                source_plugin -- stable tie-break
            LIMIT 1
        )
        SELECT COALESCE((SELECT allow FROM winner), 0) AS is_allowed
    """

    # Execute the query against the internal database
    result = await datasette.get_internal_database().execute(query, all_params)
    if result.rows:
        return bool(result.rows[0][0])
    return False

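A hedged sketch of calling check_permission_for_resource() from the module above (assumes a configured Datasette instance with permission hooks registered):

    allowed = await check_permission_for_resource(
        datasette=datasette,
        actor={"id": "carol"},
        action="view-table",
        parent="production",
        child="orders",
    )
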
@@ -1,12 +1,11 @@
import json
from datasette.utils import MultiParams, calculate_etag
from datasette.utils import MultiParams
from mimetypes import guess_type
from urllib.parse import parse_qs, urlunparse, parse_qsl
from pathlib import Path
from http.cookies import SimpleCookie, Morsel
import aiofiles
import aiofiles.os
import re

# Workaround for adding samesite support to pre 3.8 python
Morsel._reserved["samesite"] = "SameSite"

@@ -23,21 +22,21 @@ class NotFound(Base400):


class DatabaseNotFound(NotFound):
    def __init__(self, database_name):
    def __init__(self, message, database_name):
        super().__init__(message)
        self.database_name = database_name
        super().__init__("Database not found")


class TableNotFound(NotFound):
    def __init__(self, database_name, table):
        super().__init__("Table not found")
    def __init__(self, message, database_name, table):
        super().__init__(message)
        self.database_name = database_name
        self.table = table


class RowNotFound(NotFound):
    def __init__(self, database_name, table, pk_values):
        super().__init__("Row not found")
    def __init__(self, message, database_name, table, pk_values):
        super().__init__(message)
        self.database_name = database_name
        self.table_name = table
        self.pk_values = pk_values

@@ -249,9 +248,6 @@ async def asgi_send_html(send, html, status=200, headers=None):


async def asgi_send_redirect(send, location, status=302):
    # Prevent open redirect vulnerability: strip multiple leading slashes
    # //example.com would be interpreted as a protocol-relative URL (e.g., https://example.com/)
    location = re.sub(r"^/+", "/", location)
    await asgi_send(
        send,
        "",

@@ -289,7 +285,6 @@ async def asgi_send_file(
    headers = headers or {}
    if filename:
        headers["content-disposition"] = f'attachment; filename="{filename}"'

    first = True
    headers["content-length"] = str((await aiofiles.os.stat(str(filepath))).st_size)
    async with aiofiles.open(str(filepath), mode="rb") as fp:

@@ -312,14 +307,9 @@ async def asgi_send_file(

def asgi_static(root_path, chunk_size=4096, headers=None, content_type=None):
    root_path = Path(root_path)
    static_headers = {}

    if headers:
        static_headers = headers.copy()

    async def inner_static(request, send):
        path = request.scope["url_route"]["kwargs"]["path"]
        headers = static_headers.copy()
        try:
            full_path = (root_path / path).resolve().absolute()
        except FileNotFoundError:

@@ -335,15 +325,7 @@ def asgi_static(root_path, chunk_size=4096, headers=None, content_type=None):
            await asgi_send_html(send, "404: Path not inside root path", 404)
            return
        try:
            # Calculate ETag for filepath
            etag = await calculate_etag(full_path, chunk_size=chunk_size)
            headers["ETag"] = etag
            if_none_match = request.headers.get("if-none-match")
            if if_none_match and if_none_match == etag:
                return await asgi_send(send, "", 304)
            await asgi_send_file(
                send, full_path, chunk_size=chunk_size, headers=headers
            )
            await asgi_send_file(send, full_path, chunk_size=chunk_size)
        except FileNotFoundError:
            await asgi_send_html(send, "404: File not found", 404)
            return

@@ -467,18 +449,3 @@ class AsgiFileDownload:
            content_type=self.content_type,
            headers=self.headers,
        )


class AsgiRunOnFirstRequest:
    def __init__(self, asgi, on_startup):
        assert isinstance(on_startup, list)
        self.asgi = asgi
        self.on_startup = on_startup
        self._started = False

    async def __call__(self, scope, receive, send):
        if not self._started:
            self._started = True
            for hook in self.on_startup:
                await hook()
        return await self.asgi(scope, receive, send)

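The exception signatures differ between the two sides of this hunk; the side that bakes the message into the class is used like this (illustrative database and table names):

    raise DatabaseNotFound("fixtures")
    raise TableNotFound("fixtures", "facetable")
    raise RowNotFound("fixtures", "facetable", [1])
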
@@ -1,25 +0,0 @@
import inspect
import types
from typing import NamedTuple, Any


class CallableStatus(NamedTuple):
    is_callable: bool
    is_async_callable: bool


def check_callable(obj: Any) -> CallableStatus:
    if not callable(obj):
        return CallableStatus(False, False)

    if isinstance(obj, type):
        # It's a class
        return CallableStatus(True, False)

    if isinstance(obj, types.FunctionType):
        return CallableStatus(True, inspect.iscoroutinefunction(obj))

    if hasattr(obj, "__call__"):
        return CallableStatus(True, inspect.iscoroutinefunction(obj.__call__))

    assert False, "obj {} is somehow callable with no __call__ method".format(repr(obj))

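check_callable() distinguishes sync and async callables, for example:

    async def fetch(): ...

    check_callable(fetch)       # CallableStatus(is_callable=True, is_async_callable=True)
    check_callable(len)         # CallableStatus(is_callable=True, is_async_callable=False)
    check_callable("not code")  # CallableStatus(is_callable=False, is_async_callable=False)
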
@@ -5,29 +5,21 @@ from datasette.utils import table_column_details
async def init_internal_db(db):
    create_tables_sql = textwrap.dedent(
        """
    CREATE TABLE IF NOT EXISTS catalog_databases (
    CREATE TABLE IF NOT EXISTS databases (
        database_name TEXT PRIMARY KEY,
        path TEXT,
        is_memory INTEGER,
        schema_version INTEGER
    );
    CREATE TABLE IF NOT EXISTS catalog_tables (
    CREATE TABLE IF NOT EXISTS tables (
        database_name TEXT,
        table_name TEXT,
        rootpage INTEGER,
        sql TEXT,
        PRIMARY KEY (database_name, table_name),
        FOREIGN KEY (database_name) REFERENCES catalog_databases(database_name)
        FOREIGN KEY (database_name) REFERENCES databases(database_name)
    );
    CREATE TABLE IF NOT EXISTS catalog_views (
        database_name TEXT,
        view_name TEXT,
        rootpage INTEGER,
        sql TEXT,
        PRIMARY KEY (database_name, view_name),
        FOREIGN KEY (database_name) REFERENCES catalog_databases(database_name)
    );
    CREATE TABLE IF NOT EXISTS catalog_columns (
    CREATE TABLE IF NOT EXISTS columns (
        database_name TEXT,
        table_name TEXT,
        cid INTEGER,

@@ -38,10 +30,10 @@ async def init_internal_db(db):
        is_pk INTEGER, -- renamed from pk
        hidden INTEGER,
        PRIMARY KEY (database_name, table_name, name),
        FOREIGN KEY (database_name) REFERENCES catalog_databases(database_name),
        FOREIGN KEY (database_name, table_name) REFERENCES catalog_tables(database_name, table_name)
        FOREIGN KEY (database_name) REFERENCES databases(database_name),
        FOREIGN KEY (database_name, table_name) REFERENCES tables(database_name, table_name)
    );
    CREATE TABLE IF NOT EXISTS catalog_indexes (
    CREATE TABLE IF NOT EXISTS indexes (
        database_name TEXT,
        table_name TEXT,
        seq INTEGER,

@@ -50,10 +42,10 @@ async def init_internal_db(db):
        origin TEXT,
        partial INTEGER,
        PRIMARY KEY (database_name, table_name, name),
        FOREIGN KEY (database_name) REFERENCES catalog_databases(database_name),
        FOREIGN KEY (database_name, table_name) REFERENCES catalog_tables(database_name, table_name)
        FOREIGN KEY (database_name) REFERENCES databases(database_name),
        FOREIGN KEY (database_name, table_name) REFERENCES tables(database_name, table_name)
    );
    CREATE TABLE IF NOT EXISTS catalog_foreign_keys (
    CREATE TABLE IF NOT EXISTS foreign_keys (
        database_name TEXT,
        table_name TEXT,
        id INTEGER,

@@ -65,92 +57,35 @@ async def init_internal_db(db):
        on_delete TEXT,
        match TEXT,
        PRIMARY KEY (database_name, table_name, id, seq),
        FOREIGN KEY (database_name) REFERENCES catalog_databases(database_name),
        FOREIGN KEY (database_name, table_name) REFERENCES catalog_tables(database_name, table_name)
        FOREIGN KEY (database_name) REFERENCES databases(database_name),
        FOREIGN KEY (database_name, table_name) REFERENCES tables(database_name, table_name)
    );
    """
    ).strip()
    await db.execute_write_script(create_tables_sql)
    await initialize_metadata_tables(db)


async def initialize_metadata_tables(db):
    await db.execute_write_script(
        textwrap.dedent(
            """
    CREATE TABLE IF NOT EXISTS metadata_instance (
        key text,
        value text,
        unique(key)
    );

    CREATE TABLE IF NOT EXISTS metadata_databases (
        database_name text,
        key text,
        value text,
        unique(database_name, key)
    );

    CREATE TABLE IF NOT EXISTS metadata_resources (
        database_name text,
        resource_name text,
        key text,
        value text,
        unique(database_name, resource_name, key)
    );

    CREATE TABLE IF NOT EXISTS metadata_columns (
        database_name text,
        resource_name text,
        column_name text,
        key text,
        value text,
        unique(database_name, resource_name, column_name, key)
    );
    """
        )
    )


async def populate_schema_tables(internal_db, db):
    database_name = db.name

    def delete_everything(conn):
        conn.execute("DELETE FROM tables WHERE database_name = ?", [database_name])
        conn.execute("DELETE FROM columns WHERE database_name = ?", [database_name])
        conn.execute(
            "DELETE FROM catalog_tables WHERE database_name = ?", [database_name]
        )
        conn.execute(
            "DELETE FROM catalog_views WHERE database_name = ?", [database_name]
        )
        conn.execute(
            "DELETE FROM catalog_columns WHERE database_name = ?", [database_name]
        )
        conn.execute(
            "DELETE FROM catalog_foreign_keys WHERE database_name = ?",
            [database_name],
        )
        conn.execute(
            "DELETE FROM catalog_indexes WHERE database_name = ?", [database_name]
            "DELETE FROM foreign_keys WHERE database_name = ?", [database_name]
        )
        conn.execute("DELETE FROM indexes WHERE database_name = ?", [database_name])

    await internal_db.execute_write_fn(delete_everything)

    tables = (await db.execute("select * from sqlite_master WHERE type = 'table'")).rows
    views = (await db.execute("select * from sqlite_master WHERE type = 'view'")).rows

    def collect_info(conn):
        tables_to_insert = []
        views_to_insert = []
        columns_to_insert = []
        foreign_keys_to_insert = []
        indexes_to_insert = []

        for view in views:
            view_name = view["name"]
            views_to_insert.append(
                (database_name, view_name, view["rootpage"], view["sql"])
            )

        for table in tables:
            table_name = table["name"]
            tables_to_insert.append(

@@ -184,7 +119,6 @@ async def populate_schema_tables(internal_db, db):
        )
        return (
            tables_to_insert,
            views_to_insert,
            columns_to_insert,
            foreign_keys_to_insert,
            indexes_to_insert,

@@ -192,7 +126,6 @@ async def populate_schema_tables(internal_db, db):

    (
        tables_to_insert,
        views_to_insert,
        columns_to_insert,
        foreign_keys_to_insert,
        indexes_to_insert,

@@ -200,21 +133,14 @@ async def populate_schema_tables(internal_db, db):

    await internal_db.execute_write_many(
        """
        INSERT INTO catalog_tables (database_name, table_name, rootpage, sql)
        INSERT INTO tables (database_name, table_name, rootpage, sql)
        values (?, ?, ?, ?)
    """,
        tables_to_insert,
    )
    await internal_db.execute_write_many(
        """
        INSERT INTO catalog_views (database_name, view_name, rootpage, sql)
        values (?, ?, ?, ?)
    """,
        views_to_insert,
    )
    await internal_db.execute_write_many(
        """
        INSERT INTO catalog_columns (
        INSERT INTO columns (
            database_name, table_name, cid, name, type, "notnull", default_value, is_pk, hidden
        ) VALUES (
            :database_name, :table_name, :cid, :name, :type, :notnull, :default_value, :is_pk, :hidden

@@ -224,7 +150,7 @@ async def populate_schema_tables(internal_db, db):
    )
    await internal_db.execute_write_many(
        """
        INSERT INTO catalog_foreign_keys (
        INSERT INTO foreign_keys (
            database_name, table_name, "id", seq, "table", "from", "to", on_update, on_delete, match
        ) VALUES (
            :database_name, :table_name, :id, :seq, :table, :from, :to, :on_update, :on_delete, :match

@@ -234,7 +160,7 @@ async def populate_schema_tables(internal_db, db):
    )
    await internal_db.execute_write_many(
        """
        INSERT INTO catalog_indexes (
        INSERT INTO indexes (
            database_name, table_name, seq, name, "unique", origin, partial
        ) VALUES (
            :database_name, :table_name, :seq, :name, :unique, :origin, :partial

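With the catalog_ prefix applied, the renamed tables can be queried like this (a sketch against the internal database; the file path is illustrative):

    import sqlite3

    conn = sqlite3.connect("internal.db")
    rows = conn.execute(
        "select database_name, table_name from catalog_tables order by 1, 2"
    ).fetchall()
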
@@ -1,439 +0,0 @@
# perm_utils.py
from __future__ import annotations

import json
from typing import Any, Dict, Iterable, List, Sequence, Tuple
import sqlite3

from datasette.permissions import PermissionSQL
from datasette.plugins import pm
from datasette.utils import await_me_maybe


# Sentinel object to indicate permission checks should be skipped
SKIP_PERMISSION_CHECKS = object()


async def gather_permission_sql_from_hooks(
    *, datasette, actor: dict | None, action: str
) -> List[PermissionSQL] | object:
    """Collect PermissionSQL objects from the permission_resources_sql hook.

    Ensures that each returned PermissionSQL has a populated ``source``.

    Returns SKIP_PERMISSION_CHECKS sentinel if skip_permission_checks context variable
    is set, signaling that all permission checks should be bypassed.
    """
    from datasette.permissions import _skip_permission_checks

    # Check if we should skip permission checks BEFORE calling hooks
    # This avoids creating unawaited coroutines
    if _skip_permission_checks.get():
        return SKIP_PERMISSION_CHECKS

    hook_caller = pm.hook.permission_resources_sql
    hookimpls = hook_caller.get_hookimpls()
    hook_results = list(hook_caller(datasette=datasette, actor=actor, action=action))

    collected: List[PermissionSQL] = []
    actor_json = json.dumps(actor) if actor is not None else None
    actor_id = actor.get("id") if isinstance(actor, dict) else None

    for index, result in enumerate(hook_results):
        hookimpl = hookimpls[index]
        resolved = await await_me_maybe(result)
        default_source = _plugin_name_from_hookimpl(hookimpl)
        for permission_sql in _iter_permission_sql_from_result(resolved, action=action):
            if not permission_sql.source:
                permission_sql.source = default_source
            params = permission_sql.params or {}
            params.setdefault("action", action)
            params.setdefault("actor", actor_json)
            params.setdefault("actor_id", actor_id)
            collected.append(permission_sql)

    return collected


def _plugin_name_from_hookimpl(hookimpl) -> str:
    if getattr(hookimpl, "plugin_name", None):
        return hookimpl.plugin_name
    plugin = getattr(hookimpl, "plugin", None)
    if hasattr(plugin, "__name__"):
        return plugin.__name__
    return repr(plugin)


def _iter_permission_sql_from_result(
    result: Any, *, action: str
) -> Iterable[PermissionSQL]:
    if result is None:
        return []
    if isinstance(result, PermissionSQL):
        return [result]
    if isinstance(result, (list, tuple)):
        collected: List[PermissionSQL] = []
        for item in result:
            collected.extend(_iter_permission_sql_from_result(item, action=action))
        return collected
    if callable(result):
        permission_sql = result(action)  # type: ignore[call-arg]
        return _iter_permission_sql_from_result(permission_sql, action=action)
    raise TypeError(
        "Plugin providers must return PermissionSQL instances, sequences, or callables"
    )


# -----------------------------
# Plugin interface & utilities
# -----------------------------


def build_rules_union(
    actor: dict | None, plugins: Sequence[PermissionSQL]
) -> Tuple[str, Dict[str, Any]]:
    """
    Compose plugin SQL into a UNION ALL.

    Returns:
        union_sql: a SELECT with columns (parent, child, allow, reason, source_plugin)
        params: dict of bound parameters including :actor (JSON), :actor_id, and plugin params

    Note: Plugins are responsible for ensuring their parameter names don't conflict.
    The system reserves these parameter names: :actor, :actor_id, :action, :filter_parent
    Plugin parameters should be prefixed with a unique identifier (e.g., source name).
    """
    parts: List[str] = []
    actor_json = json.dumps(actor) if actor else None
    actor_id = actor.get("id") if actor else None
    params: Dict[str, Any] = {"actor": actor_json, "actor_id": actor_id}

    for p in plugins:
        # No namespacing - just use plugin params as-is
        params.update(p.params or {})

        # Skip plugins that only provide restriction_sql (no permission rules)
        if p.sql is None:
            continue

        parts.append(
            f"""
SELECT parent, child, allow, reason, '{p.source}' AS source_plugin FROM (
    {p.sql}
)
""".strip()
        )

    if not parts:
        # Empty UNION that returns no rows
        union_sql = "SELECT NULL parent, NULL child, NULL allow, NULL reason, 'none' source_plugin WHERE 0"
    else:
        union_sql = "\nUNION ALL\n".join(parts)

    return union_sql, params


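A minimal sketch of composing rules with build_rules_union() (assumes PermissionSQL accepts these keyword arguments):

    union_sql, params = build_rules_union(
        {"id": "alice"},
        [
            PermissionSQL(
                sql="SELECT NULL AS parent, NULL AS child, 1 AS allow, 'default allow' AS reason",
                source="example-plugin",
            )
        ],
    )
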

# -----------------------------------------------
# Core resolvers (no temp tables, no custom UDFs)
# -----------------------------------------------


async def resolve_permissions_from_catalog(
    db,
    actor: dict | None,
    plugins: Sequence[Any],
    action: str,
    candidate_sql: str,
    candidate_params: Dict[str, Any] | None = None,
    *,
    implicit_deny: bool = True,
) -> List[Dict[str, Any]]:
    """
    Resolve permissions by embedding the provided *candidate_sql* in a CTE.

    Expectations:
    - candidate_sql SELECTs: parent TEXT, child TEXT
      (Use child=NULL for parent-scoped actions like "execute-sql".)
    - *db* exposes: rows = await db.execute(sql, params)
      where rows is an iterable of sqlite3.Row
    - plugins: hook results handled by await_me_maybe - can be sync/async,
      single PermissionSQL, list, or callable returning PermissionSQL
    - actor is the actor dict (or None), made available as :actor (JSON), :actor_id, and :action

    Decision policy:
    1) Specificity first: child (depth=2) > parent (depth=1) > root (depth=0)
    2) Within the same depth: deny (0) beats allow (1)
    3) If no matching rule:
       - implicit_deny=True  -> treat as allow=0, reason='implicit deny'
       - implicit_deny=False -> allow=None, reason=None

    Returns: list of dict rows
    - parent, child, allow, reason, source_plugin, depth
    - resource (rendered "/parent/child" or "/parent" or "/")
    """
    resolved_plugins: List[PermissionSQL] = []
    restriction_sqls: List[str] = []

    for plugin in plugins:
        if callable(plugin) and not isinstance(plugin, PermissionSQL):
            resolved = plugin(action)  # type: ignore[arg-type]
        else:
            resolved = plugin  # type: ignore[assignment]
        if not isinstance(resolved, PermissionSQL):
            raise TypeError("Plugin providers must return PermissionSQL instances")
        resolved_plugins.append(resolved)

        # Collect restriction SQL filters
        if resolved.restriction_sql:
            restriction_sqls.append(resolved.restriction_sql)

    union_sql, rule_params = build_rules_union(actor, resolved_plugins)
    all_params = {
        **(candidate_params or {}),
        **rule_params,
        "action": action,
    }

    sql = f"""
    WITH
    cands AS (
        {candidate_sql}
    ),
    rules AS (
        {union_sql}
    ),
    matched AS (
        SELECT
            c.parent, c.child,
            r.allow, r.reason, r.source_plugin,
            CASE
                WHEN r.child IS NOT NULL THEN 2   -- child-level (most specific)
                WHEN r.parent IS NOT NULL THEN 1  -- parent-level
                ELSE 0                            -- root/global
            END AS depth
        FROM cands c
        JOIN rules r
            ON (r.parent IS NULL OR r.parent = c.parent)
            AND (r.child IS NULL OR r.child = c.child)
    ),
    ranked AS (
        SELECT *,
            ROW_NUMBER() OVER (
                PARTITION BY parent, child
                ORDER BY
                    depth DESC,                          -- specificity first
                    CASE WHEN allow=0 THEN 0 ELSE 1 END, -- then deny over allow at same depth
                    source_plugin                        -- stable tie-break
            ) AS rn
        FROM matched
    ),
    winner AS (
        SELECT parent, child,
            allow, reason, source_plugin, depth
        FROM ranked WHERE rn = 1
    )
    SELECT
        c.parent, c.child,
        COALESCE(w.allow, CASE WHEN :implicit_deny THEN 0 ELSE NULL END) AS allow,
        COALESCE(w.reason, CASE WHEN :implicit_deny THEN 'implicit deny' ELSE NULL END) AS reason,
        w.source_plugin,
        COALESCE(w.depth, -1) AS depth,
        :action AS action,
        CASE
            WHEN c.parent IS NULL THEN '/'
            WHEN c.child IS NULL THEN '/' || c.parent
            ELSE '/' || c.parent || '/' || c.child
        END AS resource
    FROM cands c
    LEFT JOIN winner w
        ON ((w.parent = c.parent) OR (w.parent IS NULL AND c.parent IS NULL))
        AND ((w.child = c.child) OR (w.child IS NULL AND c.child IS NULL))
    ORDER BY c.parent, c.child
    """

    # If there are restriction filters, wrap the query with INTERSECT
    # This ensures only resources in the restriction allowlist are returned
    if restriction_sqls:
        # Start with the main query, but select only parent/child for the INTERSECT
        main_query_for_intersect = f"""
        WITH
        cands AS (
            {candidate_sql}
        ),
        rules AS (
            {union_sql}
        ),
        matched AS (
            SELECT
                c.parent, c.child,
                r.allow, r.reason, r.source_plugin,
                CASE
                    WHEN r.child IS NOT NULL THEN 2   -- child-level (most specific)
                    WHEN r.parent IS NOT NULL THEN 1  -- parent-level
                    ELSE 0                            -- root/global
                END AS depth
            FROM cands c
            JOIN rules r
                ON (r.parent IS NULL OR r.parent = c.parent)
                AND (r.child IS NULL OR r.child = c.child)
        ),
        ranked AS (
            SELECT *,
                ROW_NUMBER() OVER (
                    PARTITION BY parent, child
                    ORDER BY
                        depth DESC,                          -- specificity first
                        CASE WHEN allow=0 THEN 0 ELSE 1 END, -- then deny over allow at same depth
                        source_plugin                        -- stable tie-break
                ) AS rn
            FROM matched
        ),
        winner AS (
            SELECT parent, child,
                allow, reason, source_plugin, depth
            FROM ranked WHERE rn = 1
        ),
        permitted_resources AS (
            SELECT c.parent, c.child
            FROM cands c
            LEFT JOIN winner w
                ON ((w.parent = c.parent) OR (w.parent IS NULL AND c.parent IS NULL))
                AND ((w.child = c.child) OR (w.child IS NULL AND c.child IS NULL))
            WHERE COALESCE(w.allow, CASE WHEN :implicit_deny THEN 0 ELSE NULL END) = 1
        )
        SELECT parent, child FROM permitted_resources
        """

        # Build restriction list with INTERSECT (all must match)
        # Then filter to resources that match hierarchically
        # Wrap each restriction_sql in a subquery to avoid operator precedence issues
        # with UNION ALL inside the restriction SQL statements
        restriction_intersect = "\nINTERSECT\n".join(
            f"SELECT * FROM ({sql})" for sql in restriction_sqls
        )

        # Combine: resources allowed by permissions AND in restriction allowlist
        # Database-level restrictions (parent, NULL) should match all children (parent, *)
        filtered_resources = f"""
        WITH restriction_list AS (
            {restriction_intersect}
        ),
        permitted AS (
            {main_query_for_intersect}
        ),
        filtered AS (
            SELECT p.parent, p.child
            FROM permitted p
            WHERE EXISTS (
                SELECT 1 FROM restriction_list r
                WHERE (r.parent = p.parent OR r.parent IS NULL)
                AND (r.child = p.child OR r.child IS NULL)
            )
        )
        """

        # Now join back to get full results for only the filtered resources
        sql = f"""
        {filtered_resources}
        , cands AS (
            {candidate_sql}
        ),
        rules AS (
            {union_sql}
        ),
        matched AS (
            SELECT
                c.parent, c.child,
                r.allow, r.reason, r.source_plugin,
                CASE
                    WHEN r.child IS NOT NULL THEN 2   -- child-level (most specific)
                    WHEN r.parent IS NOT NULL THEN 1  -- parent-level
                    ELSE 0                            -- root/global
                END AS depth
            FROM cands c
            JOIN rules r
                ON (r.parent IS NULL OR r.parent = c.parent)
                AND (r.child IS NULL OR r.child = c.child)
        ),
        ranked AS (
            SELECT *,
                ROW_NUMBER() OVER (
                    PARTITION BY parent, child
                    ORDER BY
                        depth DESC,                          -- specificity first
                        CASE WHEN allow=0 THEN 0 ELSE 1 END, -- then deny over allow at same depth
                        source_plugin                        -- stable tie-break
                ) AS rn
            FROM matched
        ),
        winner AS (
            SELECT parent, child,
                allow, reason, source_plugin, depth
            FROM ranked WHERE rn = 1
        )
        SELECT
            c.parent, c.child,
            COALESCE(w.allow, CASE WHEN :implicit_deny THEN 0 ELSE NULL END) AS allow,
            COALESCE(w.reason, CASE WHEN :implicit_deny THEN 'implicit deny' ELSE NULL END) AS reason,
            w.source_plugin,
            COALESCE(w.depth, -1) AS depth,
            :action AS action,
            CASE
                WHEN c.parent IS NULL THEN '/'
                WHEN c.child IS NULL THEN '/' || c.parent
                ELSE '/' || c.parent || '/' || c.child
            END AS resource
        FROM filtered c
        LEFT JOIN winner w
            ON ((w.parent = c.parent) OR (w.parent IS NULL AND c.parent IS NULL))
            AND ((w.child = c.child) OR (w.child IS NULL AND c.child IS NULL))
        ORDER BY c.parent, c.child
        """

    rows_iter: Iterable[sqlite3.Row] = await db.execute(
        sql,
        {**all_params, "implicit_deny": 1 if implicit_deny else 0},
    )
    return [dict(r) for r in rows_iter]

async def resolve_permissions_with_candidates(
    db,
    actor: dict | None,
    plugins: Sequence[Any],
    candidates: List[Tuple[str, str | None]],
    action: str,
    *,
    implicit_deny: bool = True,
) -> List[Dict[str, Any]]:
    """
    Resolve permissions without any external candidate table by embedding
    the candidates as a UNION of parameterized SELECTs in a CTE.

    candidates: list of (parent, child) where child can be None for parent-scoped actions.
    actor: actor dict (or None), made available as :actor (JSON), :actor_id, and :action
    """
    # Build a small CTE for candidates.
    cand_rows_sql: List[str] = []
    cand_params: Dict[str, Any] = {}
    for i, (parent, child) in enumerate(candidates):
        pkey = f"cand_p_{i}"
        ckey = f"cand_c_{i}"
        cand_params[pkey] = parent
        cand_params[ckey] = child
        cand_rows_sql.append(f"SELECT :{pkey} AS parent, :{ckey} AS child")
    candidate_sql = (
        "\nUNION ALL\n".join(cand_rows_sql)
        if cand_rows_sql
        else "SELECT NULL AS parent, NULL AS child WHERE 0"
    )

    return await resolve_permissions_from_catalog(
        db,
        actor,
        plugins,
        action,
        candidate_sql=candidate_sql,
        candidate_params=cand_params,
        implicit_deny=implicit_deny,
    )
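
# Illustrative sketch (hedged, not part of the module): resolving two
# candidate tables against an allow-everything rule plus one child-level
# deny. The db argument is any object exposing `await db.execute(sql,
# params)` as the docstring above describes; rules and actor are invented.
async def _example_resolve_candidates(db):
    allow_all = PermissionSQL(
        source="config",
        sql="SELECT NULL AS parent, NULL AS child, 1 AS allow, 'open instance' AS reason",
        params={},
    )
    deny_salaries = PermissionSQL(
        source="lockdown",
        sql=(
            "SELECT 'accounting' AS parent, 'salaries' AS child, "
            "0 AS allow, 'locked down' AS reason"
        ),
        params={},
    )
    results = await resolve_permissions_with_candidates(
        db,
        actor={"id": "alex"},
        plugins=[allow_all, deny_salaries],
        candidates=[("accounting", "invoices"), ("accounting", "salaries")],
        action="view-table",
    )
    # invoices resolves allow=1 from the root rule (depth 0); salaries
    # resolves allow=0 because the child-level deny (depth 2) is more
    # specific and wins under the decision policy documented above.
    return results
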
@@ -4,7 +4,6 @@ Backported from Python 3.8.
This code is licensed under the Python License:
https://github.com/python/cpython/blob/v3.8.3/LICENSE
"""

import os
from shutil import copy, copy2, copystat, Error

@@ -16,11 +16,6 @@ class TestResponse:
    def status(self):
        return self.httpx_response.status_code

    # Supports both for test-writing convenience
    @property
    def status_code(self):
        return self.status

    @property
    def headers(self):
        return self.httpx_response.headers

@@ -29,14 +24,17 @@ class TestResponse:
    def body(self):
        return self.httpx_response.content

    @property
    def content(self):
        return self.body

    @property
    def cookies(self):
        return dict(self.httpx_response.cookies)

    def cookie_was_deleted(self, cookie):
        return any(
            h
            for h in self.httpx_response.headers.get_list("set-cookie")
            if h.startswith(f'{cookie}="";')
        )

    @property
    def json(self):
        return json.loads(self.text)

@@ -62,13 +60,10 @@ class TestClient:
        follow_redirects=False,
        redirect_count=0,
        method="GET",
        params=None,
        cookies=None,
        if_none_match=None,
        headers=None,
    ):
        if params:
            path += "?" + urlencode(params, doseq=True)
        return await self._request(
            path=path,
            follow_redirects=follow_redirects,

@@ -1,2 +1,2 @@
__version__ = "1.0a23"
__version__ = "1.0a1"
__version_info__ = tuple(__version__.split("."))

@@ -1,2 +0,0 @@
class Context:
    "Base class for all documented contexts"

@@ -1,7 +1,6 @@
import asyncio
import csv
import hashlib
import json
import sys
import textwrap
import time

@@ -9,6 +8,9 @@ import urllib
from markupsafe import escape


import pint

from datasette import __version__
from datasette.database import QueryInterrupted
from datasette.utils.asgi import Request
from datasette.utils import (

@@ -31,6 +33,8 @@ from datasette.utils.asgi import (
    BadRequest,
)

ureg = pint.UnitRegistry()


class DatasetteError(Exception):
    def __init__(
@@ -49,43 +53,6 @@ class DatasetteError:
        self.message_is_html = message_is_html


class View:
    async def head(self, request, datasette):
        if not hasattr(self, "get"):
            return await self.method_not_allowed(request)
        response = await self.get(request, datasette)
        response.body = ""
        return response

    async def method_not_allowed(self, request):
        if (
            request.path.endswith(".json")
            or request.headers.get("content-type") == "application/json"
        ):
            response = Response.json(
                {"ok": False, "error": "Method not allowed"}, status=405
            )
        else:
            response = Response.text("Method not allowed", status=405)
        return response

    async def options(self, request, datasette):
        response = Response.text("ok")
        response.headers["allow"] = ", ".join(
            method.upper()
            for method in ("head", "get", "post", "put", "patch", "delete")
            if hasattr(self, method)
        )
        return response

    async def __call__(self, request, datasette):
        try:
            handler = getattr(self, request.method.lower())
        except AttributeError:
            return await self.method_not_allowed(request)
        return await handler(request, datasette)


class BaseView:
    ds = None
    has_json_alternate = True

@@ -98,6 +65,9 @@ class BaseView:
        response.body = b""
        return response

    def database_color(self, database):
        return "ff0000"

    async def method_not_allowed(self, request):
        if (
            request.path.endswith(".json")

@@ -139,11 +109,11 @@ class BaseView:

    async def render(self, templates, request, context=None):
        context = context or {}
        environment = self.ds.get_jinja_environment(request)
        template = environment.select_template(templates)
        template = self.ds.jinja_env.select_template(templates)
        template_context = {
            **context,
            **{
                "database_color": self.database_color,
                "select_templates": [
                    f"{'*' if template_name == template.name else ''}{template_name}"
                    for template_name in templates

@@ -159,7 +129,7 @@ class BaseView:
        template_context["alternate_url_json"] = alternate_url_json
        headers.update(
            {
                "Link": '<{}>; rel="alternate"; type="application/json+datasette"'.format(
                "Link": '{}; rel="alternate"; type="application/json+datasette"'.format(
                    alternate_url_json
                )
            }

@@ -204,8 +174,176 @@ class DataView(BaseView):
    async def data(self, request):
        raise NotImplementedError

    def get_templates(self, database, table=None):
        assert NotImplemented

    async def as_csv(self, request, database):
        return await stream_csv(self.ds, self.data, request, database)
        kwargs = {}
        stream = request.args.get("_stream")
        # Do not calculate facets or counts:
        extra_parameters = [
            "{}=1".format(key)
            for key in ("_nofacet", "_nocount")
            if not request.args.get(key)
        ]
        if extra_parameters:
            # Replace request object with a new one with modified scope
            if not request.query_string:
                new_query_string = "&".join(extra_parameters)
            else:
                new_query_string = (
                    request.query_string + "&" + "&".join(extra_parameters)
                )
            new_scope = dict(
                request.scope, query_string=new_query_string.encode("latin-1")
            )
            receive = request.receive
            request = Request(new_scope, receive)
        if stream:
            # Some quick soundness checks
            if not self.ds.setting("allow_csv_stream"):
                raise BadRequest("CSV streaming is disabled")
            if request.args.get("_next"):
                raise BadRequest("_next not allowed for CSV streaming")
            kwargs["_size"] = "max"
        # Fetch the first page
        try:
            response_or_template_contexts = await self.data(request)
            if isinstance(response_or_template_contexts, Response):
                return response_or_template_contexts
            elif len(response_or_template_contexts) == 4:
                data, _, _, _ = response_or_template_contexts
            else:
                data, _, _ = response_or_template_contexts
        except (sqlite3.OperationalError, InvalidSql) as e:
            raise DatasetteError(str(e), title="Invalid SQL", status=400)

        except sqlite3.OperationalError as e:
            raise DatasetteError(str(e))

        except DatasetteError:
            raise

        # Convert rows and columns to CSV
        headings = data["columns"]
        # if there are expanded_columns we need to add additional headings
        expanded_columns = set(data.get("expanded_columns") or [])
        if expanded_columns:
            headings = []
            for column in data["columns"]:
                headings.append(column)
                if column in expanded_columns:
                    headings.append(f"{column}_label")

        content_type = "text/plain; charset=utf-8"
        preamble = ""
        postamble = ""

        trace = request.args.get("_trace")
        if trace:
            content_type = "text/html; charset=utf-8"
            preamble = (
                "<html><head><title>CSV debug</title></head>"
                '<body><textarea style="width: 90%; height: 70vh">'
            )
            postamble = "</textarea></body></html>"

        async def stream_fn(r):
            nonlocal data, trace
            limited_writer = LimitedWriter(r, self.ds.setting("max_csv_mb"))
            if trace:
                await limited_writer.write(preamble)
                writer = csv.writer(EscapeHtmlWriter(limited_writer))
            else:
                writer = csv.writer(limited_writer)
            first = True
            next = None
            while first or (next and stream):
                try:
                    kwargs = {}
                    if next:
                        kwargs["_next"] = next
                    if not first:
                        data, _, _ = await self.data(request, **kwargs)
                    if first:
                        if request.args.get("_header") != "off":
                            await writer.writerow(headings)
                        first = False
                    next = data.get("next")
                    for row in data["rows"]:
                        if any(isinstance(r, bytes) for r in row):
                            new_row = []
                            for column, cell in zip(headings, row):
                                if isinstance(cell, bytes):
                                    # If this is a table page, use .urls.row_blob()
                                    if data.get("table"):
                                        pks = data.get("primary_keys") or []
                                        cell = self.ds.absolute_url(
                                            request,
                                            self.ds.urls.row_blob(
                                                database,
                                                data["table"],
                                                path_from_row_pks(row, pks, not pks),
                                                column,
                                            ),
                                        )
                                    else:
                                        # Otherwise generate URL for this query
                                        url = self.ds.absolute_url(
                                            request,
                                            path_with_format(
                                                request=request,
                                                format="blob",
                                                extra_qs={
                                                    "_blob_column": column,
                                                    "_blob_hash": hashlib.sha256(
                                                        cell
                                                    ).hexdigest(),
                                                },
                                                replace_format="csv",
                                            ),
                                        )
                                        cell = url.replace("&_nocount=1", "").replace(
                                            "&_nofacet=1", ""
                                        )
                                new_row.append(cell)
                            row = new_row
                        if not expanded_columns:
                            # Simple path
                            await writer.writerow(row)
                        else:
                            # Look for {"value": "label": } dicts and expand
                            new_row = []
                            for heading, cell in zip(data["columns"], row):
                                if heading in expanded_columns:
                                    if cell is None:
                                        new_row.extend(("", ""))
                                    else:
                                        assert isinstance(cell, dict)
                                        new_row.append(cell["value"])
                                        new_row.append(cell["label"])
                                else:
                                    new_row.append(cell)
                            await writer.writerow(new_row)
                except Exception as e:
                    sys.stderr.write("Caught this error: {}\n".format(e))
                    sys.stderr.flush()
                    await r.write(str(e))
                    return
            await limited_writer.write(postamble)

        headers = {}
        if self.ds.cors:
            add_cors_headers(headers)
        if request.args.get("_dl", None):
            if not trace:
                content_type = "text/csv; charset=utf-8"
            disposition = 'attachment; filename="{}.csv"'.format(
                request.url_vars.get("table", database)
            )
            headers["content-disposition"] = disposition

        return AsgiStream(stream_fn, headers=headers, content_type=content_type)

    async def get(self, request):
        db = await self.ds.resolve_database(request)

@@ -271,6 +409,10 @@ class DataView(BaseView):

        end = time.perf_counter()
        data["query_ms"] = (end - start) * 1000
        for key in ("source", "source_url", "license", "license_url"):
            value = self.ds.metadata(key)
            if value:
                data[key] = value

        # Special case for .jsono extension - redirect to _shape=objects
        if _format == "jsono":

@@ -298,8 +440,6 @@ class DataView(BaseView):
            table=data.get("table"),
            request=request,
            view_name=self.name,
            truncated=False,  # TODO: support this
            error=data.get("error"),
            # These will be deprecated in Datasette 1.0:
            args=request.args,
            data=data,

@@ -378,7 +518,7 @@ class DataView(BaseView):
            },
        }
        if "metadata" not in context:
            context["metadata"] = await self.ds.get_instance_metadata()
            context["metadata"] = self.ds.metadata
        r = await self.render(templates, request=request, context=context)
        if status_code is not None:
            r.status = status_code

@@ -406,170 +546,3 @@ class DataView(BaseView):

def _error(messages, status=400):
    return Response.json({"ok": False, "errors": messages}, status=status)


async def stream_csv(datasette, fetch_data, request, database):
    kwargs = {}
    stream = request.args.get("_stream")
    # Do not calculate facets or counts:
    extra_parameters = [
        "{}=1".format(key)
        for key in ("_nofacet", "_nocount")
        if not request.args.get(key)
    ]
    if extra_parameters:
        # Replace request object with a new one with modified scope
        if not request.query_string:
            new_query_string = "&".join(extra_parameters)
        else:
            new_query_string = request.query_string + "&" + "&".join(extra_parameters)
        new_scope = dict(request.scope, query_string=new_query_string.encode("latin-1"))
        receive = request.receive
        request = Request(new_scope, receive)
    if stream:
        # Some quick soundness checks
        if not datasette.setting("allow_csv_stream"):
            raise BadRequest("CSV streaming is disabled")
        if request.args.get("_next"):
            raise BadRequest("_next not allowed for CSV streaming")
        kwargs["_size"] = "max"
    # Fetch the first page
    try:
        response_or_template_contexts = await fetch_data(request)
        if isinstance(response_or_template_contexts, Response):
            return response_or_template_contexts
        elif len(response_or_template_contexts) == 4:
            data, _, _, _ = response_or_template_contexts
        else:
            data, _, _ = response_or_template_contexts
    except (sqlite3.OperationalError, InvalidSql) as e:
        raise DatasetteError(str(e), title="Invalid SQL", status=400)

    except sqlite3.OperationalError as e:
        raise DatasetteError(str(e))

    except DatasetteError:
        raise

    # Convert rows and columns to CSV
    headings = data["columns"]
    # if there are expanded_columns we need to add additional headings
    expanded_columns = set(data.get("expanded_columns") or [])
    if expanded_columns:
        headings = []
        for column in data["columns"]:
            headings.append(column)
            if column in expanded_columns:
                headings.append(f"{column}_label")

    content_type = "text/plain; charset=utf-8"
    preamble = ""
    postamble = ""

    trace = request.args.get("_trace")
    if trace:
        content_type = "text/html; charset=utf-8"
        preamble = (
            "<html><head><title>CSV debug</title></head>"
            '<body><textarea style="width: 90%; height: 70vh">'
        )
        postamble = "</textarea></body></html>"

    async def stream_fn(r):
        nonlocal data, trace
        limited_writer = LimitedWriter(r, datasette.setting("max_csv_mb"))
        if trace:
            await limited_writer.write(preamble)
            writer = csv.writer(EscapeHtmlWriter(limited_writer))
        else:
            writer = csv.writer(limited_writer)
        first = True
        next = None
        while first or (next and stream):
            try:
                kwargs = {}
                if next:
                    kwargs["_next"] = next
                if not first:
                    data, _, _ = await fetch_data(request, **kwargs)
                if first:
                    if request.args.get("_header") != "off":
                        await writer.writerow(headings)
                    first = False
                next = data.get("next")
                for row in data["rows"]:
                    if any(isinstance(r, bytes) for r in row):
                        new_row = []
                        for column, cell in zip(headings, row):
                            if isinstance(cell, bytes):
                                # If this is a table page, use .urls.row_blob()
                                if data.get("table"):
                                    pks = data.get("primary_keys") or []
                                    cell = datasette.absolute_url(
                                        request,
                                        datasette.urls.row_blob(
                                            database,
                                            data["table"],
                                            path_from_row_pks(row, pks, not pks),
                                            column,
                                        ),
                                    )
                                else:
                                    # Otherwise generate URL for this query
                                    url = datasette.absolute_url(
                                        request,
                                        path_with_format(
                                            request=request,
                                            format="blob",
                                            extra_qs={
                                                "_blob_column": column,
                                                "_blob_hash": hashlib.sha256(
                                                    cell
                                                ).hexdigest(),
                                            },
                                            replace_format="csv",
                                        ),
                                    )
                                    cell = url.replace("&_nocount=1", "").replace(
                                        "&_nofacet=1", ""
                                    )
                            new_row.append(cell)
                        row = new_row
                    if not expanded_columns:
                        # Simple path
                        await writer.writerow(row)
                    else:
                        # Look for {"value": "label": } dicts and expand
                        new_row = []
                        for heading, cell in zip(data["columns"], row):
                            if heading in expanded_columns:
                                if cell is None:
                                    new_row.extend(("", ""))
                                else:
                                    if not isinstance(cell, dict):
                                        new_row.extend((cell, ""))
                                    else:
                                        new_row.append(cell["value"])
                                        new_row.append(cell["label"])
                            else:
                                new_row.append(cell)
                        await writer.writerow(new_row)
            except Exception as ex:
                sys.stderr.write("Caught this error: {}\n".format(ex))
                sys.stderr.flush()
                await r.write(str(ex))
                return
        await limited_writer.write(postamble)

    headers = {}
    if datasette.cors:
        add_cors_headers(headers)
    if request.args.get("_dl", None):
        if not trace:
            content_type = "text/csv; charset=utf-8"
        disposition = 'attachment; filename="{}.csv"'.format(
            request.url_vars.get("table", database)
        )
        headers["content-disposition"] = disposition

    return AsgiStream(stream_fn, headers=headers, content_type=content_type)
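
To make the expanded-columns branch above concrete, here is a hedged sketch of the row shape it handles (the values are invented): a foreign-key cell arrives as a {"value": ..., "label": ...} dict and is flattened into the value plus an extra _label column; in the module-level stream_csv variant a bare non-dict cell degrades to (cell, "").

# Illustrative data only, not taken from the diff above
headings = ["id", "author", "author_label"]  # "author" is in expanded_columns
row = [1, {"value": 7, "label": "Simon"}]
# The writer emits: 1, 7, Simon  (one extra _label column per expanded FK)
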
File diff suppressed because it is too large
@@ -1,12 +1,7 @@
import hashlib
import json

from datasette.plugins import pm
from datasette.utils import (
    add_cors_headers,
    await_me_maybe,
    make_slot_function,
    CustomJSONEncoder,
)
from datasette.utils import add_cors_headers, CustomJSONEncoder
from datasette.utils.asgi import Response
from datasette.version import __version__


@@ -25,49 +20,28 @@ class IndexView(BaseView):

    async def get(self, request):
        as_format = request.url_vars["format"]
        await self.ds.ensure_permission(action="view-instance", actor=request.actor)

        # Get all allowed databases and tables in bulk
        db_page = await self.ds.allowed_resources(
            "view-database", request.actor, include_is_private=True
        )
        allowed_databases = [r async for r in db_page.all()]
        allowed_db_dict = {r.parent: r for r in allowed_databases}

        # Group tables by database
        tables_by_db = {}
        table_page = await self.ds.allowed_resources(
            "view-table", request.actor, include_is_private=True
        )
        async for t in table_page.all():
            if t.parent not in tables_by_db:
                tables_by_db[t.parent] = {}
            tables_by_db[t.parent][t.child] = t

        await self.ds.ensure_permissions(request.actor, ["view-instance"])
        databases = []
        # Iterate over allowed databases instead of all databases
        for name in allowed_db_dict.keys():
            db = self.ds.databases[name]
            database_private = allowed_db_dict[name].private

            # Get allowed tables/views for this database
            allowed_for_db = tables_by_db.get(name, {})

            # Get table names from allowed set instead of db.table_names()
            table_names = [child_name for child_name in allowed_for_db.keys()]

        for name, db in self.ds.databases.items():
            database_visible, database_private = await self.ds.check_visibility(
                request.actor,
                "view-database",
                name,
            )
            if not database_visible:
                continue
            table_names = await db.table_names()
            hidden_table_names = set(await db.hidden_table_names())

            # Determine which allowed items are views
            view_names_set = set(await db.view_names())
            views = [
                {"name": child_name, "private": resource.private}
                for child_name, resource in allowed_for_db.items()
                if child_name in view_names_set
            ]

            # Filter to just tables (not views) for table processing
            table_names = [name for name in table_names if name not in view_names_set]
            views = []
            for view_name in await db.view_names():
                view_visible, view_private = await self.ds.check_visibility(
                    request.actor,
                    "view-table",
                    (name, view_name),
                )
                if view_visible:
                    views.append({"name": view_name, "private": view_private})

            # Perform counts only for immutable or DBS with <= COUNT_TABLE_LIMIT tables
            table_counts = {}

@@ -79,10 +53,13 @@ class IndexView(BaseView):

            tables = {}
            for table in table_names:
                # Check if table is in allowed set
                if table not in allowed_for_db:
                visible, private = await self.ds.check_visibility(
                    request.actor,
                    "view-table",
                    (name, table),
                )
                if not visible:
                    continue

                table_columns = await db.table_columns(table)
                tables[table] = {
                    "name": table,

@@ -92,7 +69,7 @@ class IndexView(BaseView):
                    "hidden": table in hidden_table_names,
                    "fts_table": await db.fts_table(table),
                    "num_relationships_for_sorting": 0,
                    "private": allowed_for_db[table].private,
                    "private": private,
                }

            if request.args.get("_sort") == "relationships" or not table_counts:

@@ -128,7 +105,9 @@ class IndexView(BaseView):
                {
                    "name": name,
                    "hash": db.hash,
                    "color": db.color,
                    "color": db.hash[:6]
                    if db.hash
                    else hashlib.md5(name.encode("utf8")).hexdigest()[:6],
                    "path": self.ds.urls.database(name),
                    "tables_and_views_truncated": tables_and_views_truncated,
                    "tables_and_views_more": (len(visible_tables) + len(views))

@@ -150,41 +129,20 @@ class IndexView(BaseView):
        if self.ds.cors:
            add_cors_headers(headers)
        return Response(
            json.dumps(
                {
                    "databases": {db["name"]: db for db in databases},
                    "metadata": await self.ds.get_instance_metadata(),
                },
                cls=CustomJSONEncoder,
            ),
            json.dumps({db["name"]: db for db in databases}, cls=CustomJSONEncoder),
            content_type="application/json; charset=utf-8",
            headers=headers,
        )
        else:
            homepage_actions = []
            for hook in pm.hook.homepage_actions(
                datasette=self.ds,
                actor=request.actor,
                request=request,
            ):
                extra_links = await await_me_maybe(hook)
                if extra_links:
                    homepage_actions.extend(extra_links)
            alternative_homepage = request.path == "/-/"
            return await self.render(
                ["default:index.html" if alternative_homepage else "index.html"],
                ["index.html"],
                request=request,
                context={
                    "databases": databases,
                    "metadata": await self.ds.get_instance_metadata(),
                    "metadata": self.ds.metadata(),
                    "datasette_version": __version__,
                    "private": not await self.ds.allowed(
                        action="view-instance", actor=None
                    "private": not await self.ds.permission_allowed(
                        None, "view-instance", default=True
                    ),
                    "top_homepage": make_slot_function(
                        "top_homepage", self.ds, request
                    ),
                    "homepage_actions": homepage_actions,
                    "noindex": request.path == "/-/",
                },
            )

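The rewritten IndexView above leans on the new bulk permission API. As a standalone sketch (hedged: the method and attribute names are taken from the diff, while the surrounding helper function is hypothetical):

async def list_visible_tables(datasette, actor):
    # One bulk query instead of a permission check per table
    page = await datasette.allowed_resources(
        "view-table", actor, include_is_private=True
    )
    tables_by_db = {}
    async for resource in page.all():
        # Each resource exposes .parent (database), .child (table) and
        # .private, per the attribute access in the IndexView diff above
        tables_by_db.setdefault(resource.parent, {})[resource.child] = resource
    return tables_by_db
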
@@ -1,15 +1,13 @@
from datasette.utils.asgi import NotFound, Forbidden, Response
from datasette.database import QueryInterrupted
from datasette.events import UpdateRowEvent, DeleteRowEvent
from datasette.resources import TableResource
from .base import DataView, BaseView, _error
from datasette.utils import (
    await_me_maybe,
    make_slot_function,
    tilde_decode,
    urlsafe_components,
    to_css_class,
    escape_sqlite,
    row_sql_params_pks,
)
from datasette.plugins import pm
import json
import sqlite_utils
from .table import display_columns_and_rows

@@ -20,16 +18,18 @@ class RowView(DataView):

    async def data(self, request, default_labels=False):
        resolved = await self.ds.resolve_row(request)
        db = resolved.db
        database = db.name
        database = resolved.db.name
        table = resolved.table
        pk_values = resolved.pk_values

        # Ensure user has permission to view this row
        visible, private = await self.ds.check_visibility(
            request.actor,
            action="view-table",
            resource=TableResource(database=database, table=table),
            permissions=[
                ("view-table", (database, table)),
                ("view-database", database),
                "view-instance",
            ],
        )
        if not visible:
            raise Forbidden("You do not have permission to view this table")

@@ -51,30 +51,14 @@ class RowView(DataView):
            rows,
            link_column=False,
            truncate_cells=0,
            request=request,
        )
        for column in display_columns:
            column["sortable"] = False

        row_actions = []
        for hook in pm.hook.row_actions(
            datasette=self.ds,
            actor=request.actor,
            request=request,
            database=database,
            table=table,
            row=rows[0],
        ):
            extra_links = await await_me_maybe(hook)
            if extra_links:
                row_actions.extend(extra_links)

        return {
            "private": private,
            "foreign_key_tables": await self.foreign_key_tables(
                database, table, pk_values
            ),
            "database_color": db.color,
            "display_columns": display_columns,
            "display_rows": display_rows,
            "custom_table_templates": [

@@ -82,16 +66,10 @@ class RowView(DataView):
                f"_table-row-{to_css_class(database)}-{to_css_class(table)}.html",
                "_table.html",
            ],
            "row_actions": row_actions,
            "top_row": make_slot_function(
                "top_row",
                self.ds,
                request,
                database=resolved.db.name,
                table=resolved.table,
                row=rows[0],
            ),
            "metadata": {},
            "metadata": (self.ds.metadata("databases") or {})
            .get(database, {})
            .get("tables", {})
            .get(table, {}),
        }

        data = {

@@ -101,6 +79,7 @@ class RowView(DataView):
            "columns": columns,
            "primary_keys": resolved.pks,
            "primary_key_values": pk_values,
            "units": self.ds.table_metadata(database, table).get("units", {}),
        }

        if "foreign_key_tables" in (request.args.get("_extras") or "").split(","):

@@ -182,10 +161,8 @@ async def _resolve_row_and_check_permission(datasette, request, permission):
        return False, _error(["Record not found: {}".format(e.pk_values)], 404)

    # Ensure user has permission to delete this row
    if not await datasette.allowed(
        action=permission,
        resource=TableResource(database=resolved.db.name, table=resolved.table),
        actor=request.actor,
    if not await datasette.permission_allowed(
        request.actor, permission, resource=(resolved.db.name, resolved.table)
    ):
        return False, _error(["Permission denied"], 403)


@@ -214,15 +191,6 @@ class RowDeleteView(BaseView):
        except Exception as e:
            return _error([str(e)], 500)

        await self.ds.track_event(
            DeleteRowEvent(
                actor=request.actor,
                database=resolved.db.name,
                table=resolved.table,
                pks=resolved.pk_values,
            )
        )

        return Response.json({"ok": True}, status=200)


@@ -247,26 +215,14 @@ class RowUpdateView(BaseView):

        if not isinstance(data, dict):
            return _error(["JSON must be a dictionary"])
        if "update" not in data or not isinstance(data["update"], dict):
        if not "update" in data or not isinstance(data["update"], dict):
            return _error(["JSON must contain an update dictionary"])

        invalid_keys = set(data.keys()) - {"update", "return", "alter"}
        if invalid_keys:
            return _error(["Invalid keys: {}".format(", ".join(invalid_keys))])

        update = data["update"]

        alter = data.get("alter")
        if alter and not await self.ds.allowed(
            action="alter-table",
            resource=TableResource(database=resolved.db.name, table=resolved.table),
            actor=request.actor,
        ):
            return _error(["Permission denied for alter-table"], 403)

        def update_row(conn):
            sqlite_utils.Database(conn)[resolved.table].update(
                resolved.pk_values, update, alter=alter
                resolved.pk_values, update
            )

        try:

@@ -279,15 +235,6 @@ class RowUpdateView(BaseView):
        results = await resolved.db.execute(
            resolved.sql, resolved.params, truncate=True
        )
        result["row"] = results.dicts()[0]

        await self.ds.track_event(
            UpdateRowEvent(
                actor=request.actor,
                database=resolved.db.name,
                table=resolved.table,
                pks=resolved.pk_values,
            )
        )

        rows = list(results.rows)
        result["row"] = dict(rows[0])
        return Response.json(result, status=200)

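In isolation, the new-style check that replaces permission_allowed() throughout this file looks roughly like this (a hedged sketch reusing the TableResource import and keyword arguments shown in the diff; the wrapper function itself is hypothetical):

async def can_act_on_row(datasette, request, permission, database, table):
    # New-style: explicit action / resource / actor keyword arguments
    return await datasette.allowed(
        action=permission,
        resource=TableResource(database=database, table=table),
        actor=request.actor,
    )
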
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,21 +0,0 @@
from datasette import hookimpl

# Test command:
# datasette fixtures.db \
#   --plugins-dir=demos/plugins/ \
#   --static static:demos/plugins/static

# Create a set with view names that qualify for this JS, since plugins won't do anything on other pages
# Same pattern as in Nteract data explorer
# https://github.com/hydrosquall/datasette-nteract-data-explorer/blob/main/datasette_nteract_data_explorer/__init__.py#L77
PERMITTED_VIEWS = {"table", "query", "database"}


@hookimpl
def extra_js_urls(view_name):
    print(view_name)
    if view_name in PERMITTED_VIEWS:
        return [
            {
                "url": "/static/table-example-plugins.js",
            }
        ]

@@ -1,100 +0,0 @@
/**
 * Example usage of Datasette JS Manager API
 */

document.addEventListener("datasette_init", function (evt) {
  const { detail: manager } = evt;
  // === Demo plugins: remove before merge===
  addPlugins(manager);
});

/**
 * Examples for to test datasette JS api
 */
const addPlugins = (manager) => {

  manager.registerPlugin("column-name-plugin", {
    version: 0.1,
    makeColumnActions: (columnMeta) => {
      const { column } = columnMeta;

      return [
        {
          label: "Copy name to clipboard",
          onClick: (evt) => copyToClipboard(column),
        },
        {
          label: "Log column metadata to console",
          onClick: (evt) => console.log(column),
        },
      ];
    },
  });

  manager.registerPlugin("panel-plugin-graphs", {
    version: 0.1,
    makeAboveTablePanelConfigs: () => {
      return [
        {
          id: 'first-panel',
          label: "First",
          render: node => {
            const description = document.createElement('p');
            description.innerText = 'Hello world';
            node.appendChild(description);
          }
        },
        {
          id: 'second-panel',
          label: "Second",
          render: node => {
            const iframe = document.createElement('iframe');
            iframe.src = "https://observablehq.com/embed/@d3/sortable-bar-chart?cell=viewof+order&cell=chart";
            iframe.width = 800;
            iframe.height = 635;
            iframe.frameborder = '0';
            node.appendChild(iframe);
          }
        },
      ];
    },
  });

  manager.registerPlugin("panel-plugin-maps", {
    version: 0.1,
    makeAboveTablePanelConfigs: () => {
      return [
        {
          // ID only has to be unique within a plugin, manager namespaces for you
          id: 'first-map-panel',
          label: "Map plugin",
          // datasette-vega, leafleft can provide a "render" function
          render: node => node.innerHTML = "Here sits a map",
        },
        {
          id: 'second-panel',
          label: "Image plugin",
          render: node => {
            const img = document.createElement('img');
            img.src = 'https://datasette.io/static/datasette-logo.svg'
            node.appendChild(img);
          },
        }
      ];
    },
  });

  // Future: dispatch message to some other part of the page with CustomEvent API
  // Could use to drive filter/sort query builder actions without page refresh.
}



async function copyToClipboard(str) {
  try {
    await navigator.clipboard.writeText(str);
  } catch (err) {
    /** Rejected - text failed to copy to the clipboard. Browsers didn't give permission */
    console.error('Failed to copy: ', err);
  }
}

docs/_static/datasette-favicon.png (BIN, vendored): binary file not shown (was 208 B)
docs/_templates/base.html (vendored, 31 lines changed)
@@ -4,34 +4,3 @@
{{ super() }}
<script defer data-domain="docs.datasette.io" src="https://plausible.io/js/plausible.js"></script>
{% endblock %}

{% block scripts %}
{{ super() }}
<script>
document.addEventListener("DOMContentLoaded", function() {
  // Show banner linking to /stable/ if this is a /latest/ page
  if (!/\/latest\//.test(location.pathname)) {
    return;
  }
  var stableUrl = location.pathname.replace("/latest/", "/stable/");
  // Check it's not a 404
  fetch(stableUrl, { method: "HEAD" }).then((response) => {
    if (response.status === 200) {
      var warning = document.createElement("div");
      warning.className = "admonition warning";
      warning.innerHTML = `
        <p class="first admonition-title">Note</p>
        <p class="last">
          This documentation covers the <strong>development version</strong> of Datasette.
        </p>
        <p>
          See <a href="${stableUrl}">this page</a> for the current stable release.
        </p>
      `;
      var mainArticle = document.querySelector("article[role=main]");
      mainArticle.insertBefore(warning, mainArticle.firstChild);
    }
  });
});
</script>
{% endblock %}

File diff suppressed because it is too large
@@ -4,565 +4,6 @@
Changelog
=========

.. _v1_0_a23:

1.0a23 (2025-12-02)
-------------------

- Fix for bug where a stale database entry in ``internal.db`` could cause a 500 error on the homepage. (:issue:`2605`)
- Cosmetic improvement to ``/-/actions`` page. (:issue:`2599`)

.. _v1_0_a22:

1.0a22 (2025-11-13)
-------------------

- ``datasette serve --default-deny`` option for running Datasette configured to :ref:`deny all permissions by default <authentication_default_deny>`. (:issue:`2592`)
- ``datasette.is_client()`` method for detecting if code is :ref:`executing inside a datasette.client request <internals_datasette_is_client>`. (:issue:`2594`)
- ``datasette.pm`` property can now be used to :ref:`register and unregister plugins in tests <testing_plugins_register_in_test>`. (:issue:`2595`)

.. _v1_0_a21:

1.0a21 (2025-11-05)
-------------------

- Fixes an **open redirect** security issue: Datasette instances would redirect to ``example.com/foo/bar`` if you accessed the path ``//example.com/foo/bar``. Thanks to `James Jefferies <https://github.com/jamesjefferies>`__ for the fix. (:issue:`2429`)
- Fixed ``datasette publish cloudrun`` to work with changes to the underlying Cloud Run architecture. (:issue:`2511`)
- New ``datasette --get /path --headers`` option for inspecting the headers returned by a path. (:issue:`2578`)
- New ``datasette.client.get(..., skip_permission_checks=True)`` parameter to bypass permission checks when making requests using the internal client. (:issue:`2583`)

.. _v0_65_2:

0.65.2 (2025-11-05)
-------------------

- Fixes an **open redirect** security issue: Datasette instances would redirect to ``example.com/foo/bar`` if you accessed the path ``//example.com/foo/bar``. Thanks to `James Jefferies <https://github.com/jamesjefferies>`__ for the fix. (:issue:`2429`)
- Upgraded for compatibility with Python 3.14.
- Fixed ``datasette publish cloudrun`` to work with changes to the underlying Cloud Run architecture. (:issue:`2511`)
- Minor upgrades to fix warnings, including ``pkg_resources`` deprecation.

.. _v1_0_a20:

1.0a20 (2025-11-03)
-------------------

This alpha introduces a major breaking change prior to the 1.0 release of Datasette concerning how Datasette's permission system works.

Permission system redesign
~~~~~~~~~~~~~~~~~~~~~~~~~~

Previously the permission system worked using ``datasette.permission_allowed()`` checks which consulted all available plugins in turn to determine whether a given actor was allowed to perform a given action on a given resource.

This approach could become prohibitively expensive for large lists of items - for example, to determine the list of tables that a user could view in a large Datasette instance, each plugin implementation of that hook would be fired for every table.

The new design uses SQL queries against Datasette's internal :ref:`catalog tables <internals_internal>` to derive the list of resources for which an actor has permission for a given action. This turns an N x M problem (N resources, M plugins) into a single SQL query.

Plugins can use the new :ref:`plugin_hook_permission_resources_sql` hook to return SQL fragments which will be used as part of that query.

Plugins that use any of the following features will need to be updated to work with this and following alphas (and Datasette 1.0 stable itself):

- Checking permissions with ``datasette.permission_allowed()`` - this method has been replaced with :ref:`datasette.allowed() <datasette_allowed>`.
- Implementing the ``permission_allowed()`` plugin hook - this hook has been removed in favor of :ref:`permission_resources_sql() <plugin_hook_permission_resources_sql>`.
- Using ``register_permissions()`` to register permissions - this hook has been removed in favor of :ref:`register_actions() <plugin_register_actions>`.

Consult the :ref:`v1.0a20 upgrade guide <upgrade_guide_v1_a20>` for further details on how to upgrade affected plugins.

Plugins can now make use of two new internal methods to help resolve permission checks:

- :ref:`datasette.allowed_resources() <datasette_allowed_resources>` returns a ``PaginatedResources`` object with a ``.resources`` list of ``Resource`` instances that an actor is allowed to access for a given action (and a ``.next`` token for pagination).
- :ref:`datasette.allowed_resources_sql() <datasette_allowed_resources_sql>` returns the SQL and parameters that can be executed against the internal catalog tables to determine which resources an actor is allowed to access for a given action. This can be combined with further SQL to perform advanced custom filtering.
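
A hedged sketch of combining the second method with further SQL (the tuple return shape and the internal-database call below are illustrative assumptions, not documented API):

.. code-block:: python

    # Assumes allowed_resources_sql() returns (sql, params)
    sql, params = await datasette.allowed_resources_sql("view-table", actor)
    query = "select parent, child from (" + sql + ") where parent = :db"
    rows = await datasette.get_internal_database().execute(
        query, {**params, "db": "fixtures"}
    )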
Related changes:
|
||||
|
||||
- The way ``datasette --root`` works has changed. Running Datasette with this flag now causes the root actor to pass *all* permission checks. (:issue:`2521`)
|
||||
|
||||
- Permission debugging improvements:
|
||||
|
||||
- The ``/-/allowed`` endpoint shows resources the user is allowed to interact with for different actions.
|
||||
- ``/-/rules`` shows the raw allow/deny rules that apply to different permission checks.
|
||||
- ``/-/actions`` lists every available action.
|
||||
- ``/-/check`` can be used to try out different permission checks for the current actor.
|
||||
|
||||
Other changes
|
||||
~~~~~~~~~~~~~
|
||||
|
||||
- The internal ``catalog_views`` table now tracks SQLite views alongside tables in the introspection database. (:issue:`2495`)
|
||||
- Hitting the ``/`` brings up a search interface for navigating to tables that the current user can view. A new ``/-/tables`` endpoint supports this functionality. (:issue:`2523`)
|
||||
- Datasette attempts to detect some configuration errors on startup.
|
||||
- Datasette now supports Python 3.14 and no longer tests against Python 3.9.
|
||||
|
||||
.. _v1_0_a19:
|
||||
|
||||
1.0a19 (2025-04-21)
|
||||
-------------------
|
||||
|
||||
- Tiny cosmetic bug fix for mobile display of table rows. (:issue:`2479`)
|
||||
|
||||
.. _v1_0_a18:
|
||||
|
||||
1.0a18 (2025-04-16)
|
||||
-------------------
|
||||
|
||||
- Fix for incorrect foreign key references in the internal database schema. (:issue:`2466`)
|
||||
- The ``prepare_connection()`` hook no longer runs for the internal database. (:issue:`2468`)
|
||||
- Fixed bug where ``link:`` HTTP headers used invalid syntax. (:issue:`2470`)
|
||||
- No longer tested against Python 3.8. Now tests against Python 3.13.
|
||||
- FTS tables are now hidden by default if they correspond to a content table. (:issue:`2477`)
|
||||
- Fixed bug with foreign key links to rows in databases with filenames containing a special character. Thanks, `Jack Stratton <https://github.com/phroa>`__. (`#2476 <https://github.com/simonw/datasette/pull/2476>`__)
|
||||
|
||||
.. _v1_0_a17:
|
||||
|
||||
1.0a17 (2025-02-06)
|
||||
-------------------
|
||||
|
||||
- ``DATASETTE_SSL_KEYFILE`` and ``DATASETTE_SSL_CERTFILE`` environment variables as alternatives to ``--ssl-keyfile`` and ``--ssl-certfile``. Thanks, Alex Garcia. (:issue:`2422`)
|
||||
- ``SQLITE_EXTENSIONS`` environment variable has been renamed to ``DATASETTE_LOAD_EXTENSION``. (:issue:`2424`)
|
||||
- ``datasette serve`` environment variables are now :ref:`documented here <cli_datasette_serve_env>`.
|
||||
- The :ref:`plugin_hook_register_magic_parameters` plugin hook can now register async functions. (:issue:`2441`)
|
||||
- Datasette is now tested against Python 3.13.
|
||||
- Breadcrumbs on database and table pages now include a consistent self-link for resetting query string parameters. (:issue:`2454`)
|
||||
- Fixed issue where Datasette could crash on ``metadata.json`` with nested values. (:issue:`2455`)
|
||||
- New internal methods ``datasette.set_actor_cookie()`` and ``datasette.delete_actor_cookie()``, :ref:`described here <authentication_ds_actor>`. (:issue:`1690`)
|
||||
- ``/-/permissions`` page now shows a list of all permissions registered by plugins. (:issue:`1943`)
|
||||
- If a table has a single unique text column Datasette now detects that as the foreign key label for that table. (:issue:`2458`)
|
||||
- The ``/-/permissions`` page now includes options for filtering or exclude permission checks recorded against the current user. (:issue:`2460`)
|
||||
- Fixed a bug where replacing a database with a new one with the same name did not pick up the new database correctly. (:issue:`2465`)
|
||||
|
||||
.. _v0_65_1:
|
||||
|
||||
0.65.1 (2024-11-28)
|
||||
-------------------
|
||||
|
||||
- Fixed bug with upgraded HTTPX 0.28.0 dependency. (:issue:`2443`)
|
||||
|
||||
.. _v0_65:
|
||||
|
||||
0.65 (2024-10-07)
|
||||
-----------------
|
||||
|
||||
- Upgrade for compatibility with Python 3.13 (by vendoring Pint dependency). (:issue:`2434`)
|
||||
- Dropped support for Python 3.8.
|
||||
|
||||
.. _v1_0_a16:
|
||||
|
||||
1.0a16 (2024-09-05)
|
||||
-------------------
|
||||
|
||||
This release focuses on performance, in particular against large tables, and introduces some minor breaking changes for CSS styling in Datasette plugins.
|
||||
|
||||
- Removed the unit conversions feature and its dependency, Pint. This means Datasette is now compatible with the upcoming Python 3.13. (:issue:`2400`, :issue:`2320`)
|
||||
- The ``datasette --pdb`` option now uses the `ipdb <https://github.com/gotcha/ipdb>`__ debugger if it is installed. You can install it using ``datasette install ipdb``. Thanks, `Tiago Ilieve <https://github.com/myhro>`__. (`#2342 <https://github.com/simonw/datasette/pull/2342>`__)
|
||||
- Fixed a confusing error that occurred if ``metadata.json`` contained nested objects. (:issue:`2403`)
|
||||
- Fixed a bug with ``?_trace=1`` where it returned a blank page if the response was larger than 256KB. (:issue:`2404`)
|
||||
- Tracing mechanism now also displays SQL queries that returned errors or ran out of time. `datasette-pretty-traces 0.5 <https://github.com/simonw/datasette-pretty-traces/releases/tag/0.5>`__ includes support for displaying this new type of trace. (:issue:`2405`)
|
||||
- Fixed a text spacing with table descriptions on the homepage. (:issue:`2399`)
|
||||
- Performance improvements for large tables:
|
||||
- Suggested facets now only consider the first 1000 rows. (:issue:`2406`)
|
||||
- Improved performance of date facet suggestion against large tables. (:issue:`2407`)
|
||||
- Row counts stop at 10,000 rows when listing tables. (:issue:`2398`)
|
||||
- On table page the count stops at 10,000 rows too, with a "count all" button to execute the full count. (:issue:`2408`)
|
||||
- New ``.dicts()`` internal method on :ref:`database_results` that returns a list of dictionaries representing the results from a SQL query: (:issue:`2414`)
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
rows = (await db.execute("select * from t")).dicts()
|
||||
|
||||
- Default Datasette core CSS that styles inputs and buttons now requires a class of ``"core"`` on the element or a containing element, for example ``<form class="core">``. (:issue:`2415`)
|
||||
- Similarly, default table styles now only apply to ``<table class="rows-and-columns">``. (:issue:`2420`)
|
||||
|
||||
.. _v1_0_a15:
|
||||
|
||||
1.0a15 (2024-08-15)
|
||||
-------------------
|
||||
|
||||
- Datasette now defaults to hiding SQLite "shadow" tables, as seen in extensions such as SQLite FTS and `sqlite-vec <https://github.com/asg017/sqlite-vec>`__. Virtual tables that it makes sense to display, such as FTS core tables, are no longer hidden. Thanks, `Alex Garcia <https://github.com/asg017>`__. (:issue:`2296`)

- Fixed bug where running Datasette with one or more ``-s/--setting`` options could over-ride settings that were present in ``datasette.yml``. (:issue:`2389`)
- The Datasette homepage is now duplicated at ``/-/``, using the default ``index.html`` template. This ensures that the information on that page is still accessible even if the Datasette homepage has been customized using a custom ``index.html`` template, for example on sites like `datasette.io <https://datasette.io/>`__. (:issue:`2393`)
- Failed CSRF checks now display a more user-friendly error page. (:issue:`2390`)
- Fixed a bug where the ``json1`` extension was not correctly detected on the ``/-/versions`` page. Thanks, `Seb Bacon <https://github.com/sebbacon>`__. (:issue:`2326`)
- Fixed a bug where the Datasette write API did not correctly accept ``Content-Type: application/json; charset=utf-8``. (:issue:`2384`)
- Fixed a bug where Datasette would fail to start if ``metadata.yml`` contained a ``queries`` block. (`#2386 <https://github.com/simonw/datasette/pull/2386>`__)

.. _v1_0_a14:

1.0a14 (2024-08-05)
-------------------

This alpha introduces significant changes to Datasette's :ref:`metadata` system, some of which represent breaking changes in advance of the full 1.0 release. The new :ref:`upgrade_guide` document provides detailed coverage of those breaking changes and how they affect plugin authors and Datasette API consumers.

- The ``/databasename?sql=`` interface and JSON API for executing arbitrary SQL queries can now be found at ``/databasename/-/query?sql=``. Requests with a ``?sql=`` parameter to the old endpoints will be redirected. Thanks, `Alex Garcia <https://github.com/asg017>`__. (:issue:`2360`)
- Metadata about tables, databases, instances and columns is now stored in :ref:`internals_internal`. Thanks, Alex Garcia. (:issue:`2341`)
- Database write connections now execute using the ``IMMEDIATE`` isolation level for SQLite (see the sketch after this list). This should help avoid a rare ``SQLITE_BUSY`` error that could occur when a transaction upgraded to a write mid-flight. (:issue:`2358`)
- Fix for a bug where canned queries with named parameters could fail against SQLite 3.46. (:issue:`2353`)
- Datasette now serves ``ETag`` headers for static files. Thanks, `Agustin Bacigalup <https://github.com/redraw>`__. (`#2306 <https://github.com/simonw/datasette/pull/2306>`__)
- Dropdown menus now use a ``z-index`` that should avoid them being hidden by plugins. (:issue:`2311`)
- Incorrect table and row names are no longer reflected back on the resulting 404 page. (:issue:`2359`)
- Improved documentation for async usage of the :ref:`plugin_hook_track_event` hook. (:issue:`2319`)
- Fixed some HTTPX deprecation warnings. (:issue:`2307`)
- The ``<html>`` element now includes a ``lang="en"`` attribute. Thanks, `Charles Nepote <https://github.com/CharlesNepote>`__. (:issue:`2348`)
- Datasette's automated tests now run against the maximum and minimum supported versions of SQLite: 3.25 (from September 2018) and 3.46 (from May 2024). Thanks, Alex Garcia. (`#2352 <https://github.com/simonw/datasette/pull/2352>`__)
- Fixed an issue where clicking twice on the URL output by ``datasette --root`` produced a confusing error. (:issue:`2375`)
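
The ``IMMEDIATE`` change above can be illustrated with the standard library ``sqlite3`` module - a simplified sketch, not Datasette's actual connection code: ``BEGIN IMMEDIATE`` takes the write lock up front instead of upgrading to it at the first write statement.

.. code-block:: python

    import sqlite3

    # isolation_level=None puts the connection in autocommit mode, so we
    # can issue explicit BEGIN/COMMIT statements ourselves
    conn = sqlite3.connect("data.db", isolation_level=None)
    conn.execute("BEGIN IMMEDIATE")  # acquire the write lock immediately
    conn.execute("create table if not exists logs (message text)")
    conn.execute("insert into logs (message) values (?)", ("hello",))
    conn.execute("COMMIT")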

.. _v0_64_8:

0.64.8 (2024-06-21)
-------------------

- Security improvement: 404 pages used to reflect content from the URL path, which could be used to display misleading information to Datasette users. 404 errors no longer display additional information from the URL. (:issue:`2359`)

- Backported a better fix for correctly extracting named parameters from canned query SQL against SQLite 3.46.0. (:issue:`2353`)

.. _v0_64_7:

0.64.7 (2024-06-12)
-------------------

- Fixed a bug where canned queries with named parameters threw an error when run against SQLite 3.46.0. (:issue:`2353`)

.. _v1_0_a13:

1.0a13 (2024-03-12)
-------------------

Each of the key concepts in Datasette now has an :ref:`actions menu <plugin_actions>`, which plugins can use to add additional functionality targeting that entity.

- Plugin hook: :ref:`view_actions() <plugin_hook_view_actions>` for actions that can be applied to a SQL view. (:issue:`2297`)
- Plugin hook: :ref:`homepage_actions() <plugin_hook_homepage_actions>` for actions that apply to the instance homepage. (:issue:`2298`)
- Plugin hook: :ref:`row_actions() <plugin_hook_row_actions>` for actions that apply to the row page. (:issue:`2299`)
- Action menu items for all of the ``*_actions()`` plugin hooks can now return an optional ``"description"`` key, which will be displayed in the menu below the action label. (:issue:`2294`)
- The :ref:`plugin hooks <plugin_hooks>` documentation page is now organized with additional headings. (:issue:`2300`)
- Improved the display of action buttons on pages that also display metadata. (:issue:`2286`)
- The header and footer of the page now use a subtle gradient effect, and options in the navigation menu are better visually defined. (:issue:`2302`)
- Table names that start with an underscore now default to hidden. (:issue:`2104`)
- ``pragma_table_list`` has been added to the allow-list of SQLite pragma functions supported by Datasette. ``select * from pragma_table_list()`` is no longer blocked. (`#2104 <https://github.com/simonw/datasette/issues/2104#issuecomment-1982352475>`__)

.. _v1_0_a12:

1.0a12 (2024-02-29)
-------------------

- New :ref:`query_actions() <plugin_hook_query_actions>` plugin hook, similar to :ref:`table_actions() <plugin_hook_table_actions>` and :ref:`database_actions() <plugin_hook_database_actions>`. Can be used to add a menu of actions to the canned query or arbitrary SQL query page. (:issue:`2283`)
- New design for the button that opens the query, table and database actions menu. (:issue:`2281`)
- "does not contain" table filter for finding rows that do not contain a string. (:issue:`2287`)
- Fixed a bug in the :ref:`javascript_plugins_makeColumnActions` JavaScript plugin mechanism where the column action menu was not fully reset in between each interaction. (:issue:`2289`)

.. _v1_0_a11:

1.0a11 (2024-02-19)
-------------------

- The ``"replace": true`` argument to the ``/db/table/-/insert`` API now requires the actor to have the ``update-row`` permission. (:issue:`2279`)
- Fixed some UI bugs in the interactive permissions debugging tool. (:issue:`2278`)
- The column action menu now aligns better with the cog icon, and positions itself taking into account the width of the browser window. (:issue:`2263`)

.. _v1_0_a10:

1.0a10 (2024-02-17)
-------------------

The only changes in this alpha correspond to the way Datasette handles database transactions. (:issue:`2277`)

- The :ref:`database.execute_write_fn() <database_execute_write_fn>` method has a new ``transaction=True`` parameter. This defaults to ``True``, which means all functions executed using this method are now automatically wrapped in a transaction - previously the functions needed to handle transactions on their own, and many did not.
- Pass ``transaction=False`` to ``execute_write_fn()`` if you want to manually handle transactions in your function - see the sketch after this list.
- Several internal Datasette features, including parts of the :ref:`JSON write API <json_api_write>`, had been failing to wrap their operations in a transaction. This has been fixed by the new ``transaction=True`` default.
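
A minimal sketch of both modes, assuming an async context and a Datasette ``Database`` object called ``db``:

.. code-block:: python

    def bulk_insert(conn):
        # conn is a standard sqlite3 connection
        conn.execute("create table if not exists documents (title text)")
        conn.executemany(
            "insert into documents (title) values (?)",
            [("One",), ("Two",)],
        )

    # Wrapped in a transaction automatically (the new default):
    await db.execute_write_fn(bulk_insert)

    # Handle transactions yourself inside the function instead:
    await db.execute_write_fn(bulk_insert, transaction=False)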

.. _v1_0_a9:

1.0a9 (2024-02-16)
------------------

This alpha release adds basic alter table support to the Datasette Write API and fixes a permissions bug relating to the ``/upsert`` API endpoint.

Alter table support for create, insert, upsert and update
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

The :ref:`JSON write API <json_api_write>` can now be used to apply simple alter table schema changes, provided the acting actor has the new :ref:`actions_alter_table` permission. (:issue:`2101`)

The only alter operation supported so far is adding new columns to an existing table.

* The :ref:`/db/-/create <TableCreateView>` API now adds new columns during large operations to create a table based on incoming example ``"rows"``, in the case where one of the later rows includes columns that were not present in the earlier batches. This requires the ``create-table`` but not the ``alter-table`` permission.
* When ``/db/-/create`` is called with rows in a situation where the table may have been already created, an ``"alter": true`` key can be included to indicate that any missing columns from the new rows should be added to the table. This requires the ``alter-table`` permission.
* :ref:`/db/table/-/insert <TableInsertView>`, :ref:`/db/table/-/upsert <TableUpsertView>` and :ref:`/db/table/row-pks/-/update <RowUpdateView>` all now also accept ``"alter": true``, depending on the ``alter-table`` permission.

Operations that alter a table now fire the new :ref:`alter-table event <events>`.
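
A hedged sketch of what this looks like in practice - the database, table, column names and token below are placeholders, not values from these release notes. Rows that include a brand new column are inserted with ``"alter": true`` set:

.. code-block:: python

    import httpx

    # Requires a token with both insert-row and alter-table permissions
    response = httpx.post(
        "http://localhost:8001/data/docs/-/insert",
        json={
            "rows": [{"id": 3, "title": "Hello", "new_column": "added"}],
            "alter": True,
        },
        headers={"Authorization": "Bearer xxx"},
    )
    print(response.json())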

Permissions fix for the upsert API
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

The :ref:`/database/table/-/upsert API <TableUpsertView>` had a minor permissions bug, only affecting Datasette instances that had configured the ``insert-row`` and ``update-row`` permissions to apply to a specific table rather than the database or instance as a whole. Full details in issue :issue:`2262`.

To avoid similar mistakes in the future the ``datasette.permission_allowed()`` method now specifies ``default=`` as a keyword-only argument.

Permission checks now consider opinions from every plugin
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

The ``datasette.permission_allowed()`` method previously consulted every plugin that implemented the ``permission_allowed()`` plugin hook and obeyed the opinion of the last plugin to return a value. (:issue:`2275`)

Datasette now consults every plugin and checks to see if any of them returned ``False`` (the veto rule), and if none of them did, it then checks to see if any of them returned ``True``.

This is explained at length in the new documentation covering :ref:`authentication_permissions_explained`.
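
A minimal sketch of the resolution rule described above (an illustration, not Datasette's actual implementation):

.. code-block:: python

    def resolve_permission(opinions, default=False):
        # opinions is a list of True / False / None values, one per plugin
        if any(opinion is False for opinion in opinions):
            return False  # any explicit False acts as a veto
        if any(opinion is True for opinion in opinions):
            return True
        return default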

Other changes
~~~~~~~~~~~~~

- The new :ref:`DATASETTE_TRACE_PLUGINS=1 environment variable <writing_plugins_tracing>` turns on detailed trace output for every executed plugin hook, useful for debugging and understanding how the plugin system works at a low level. (:issue:`2274`)
- Datasette on Python 3.9 or above marks its non-cryptographic uses of the MD5 hash function as ``usedforsecurity=False``, for compatibility with FIPS systems - see the sketch after this list. (:issue:`2270`)
- SQL relating to :ref:`internals_internal` now executes inside a transaction, avoiding a potential database locked error. (:issue:`2273`)
- The ``/-/threads`` debug page now identifies the database in the name associated with each dedicated write thread. (:issue:`2265`)
- The ``/db/-/create`` API now fires an ``insert-rows`` event if rows were inserted after the table was created. (:issue:`2260`)
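
The ``usedforsecurity=False`` pattern mentioned above looks like this - a small sketch; the flag was added to ``hashlib`` in Python 3.9:

.. code-block:: python

    import hashlib

    # Declares this MD5 use is non-cryptographic, keeping it usable on
    # FIPS-enabled systems
    digest = hashlib.md5(b"some content", usedforsecurity=False).hexdigest()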

.. _v1_0_a8:

1.0a8 (2024-02-07)
------------------

This alpha release continues the migration of Datasette's configuration from ``metadata.yaml`` to the new ``datasette.yaml`` configuration file, introduces a new system for JavaScript plugins and adds several new plugin hooks.

See `Datasette 1.0a8: JavaScript plugins, new plugin hooks and plugin configuration in datasette.yaml <https://simonwillison.net/2024/Feb/7/datasette-1a8/>`__ for an annotated version of these release notes.

Configuration
~~~~~~~~~~~~~

- Plugin configuration now lives in the :ref:`datasette.yaml configuration file <configuration>`, passed to Datasette using the ``-c/--config`` option. Thanks, Alex Garcia. (:issue:`2093`)

  .. code-block:: bash

      datasette -c datasette.yaml

  Where ``datasette.yaml`` contains configuration that looks like this:

  .. code-block:: yaml

      plugins:
        datasette-cluster-map:
          latitude_column: xlat
          longitude_column: xlon

  Previously plugins were configured in ``metadata.yaml``, which was confusing as plugin settings were unrelated to database and table metadata.

- The ``-s/--setting`` option can now be used to set plugin configuration as well. See :ref:`configuration_cli` for details. (:issue:`2252`)

  The above YAML configuration example using ``-s/--setting`` looks like this:

  .. code-block:: bash

      datasette mydatabase.db \
        -s plugins.datasette-cluster-map.latitude_column xlat \
        -s plugins.datasette-cluster-map.longitude_column xlon

- The new ``/-/config`` page shows the current instance configuration, after redacting keys that could contain sensitive data such as API keys or passwords. (:issue:`2254`)
- Existing Datasette installations may already have configuration set in ``metadata.yaml`` that should be migrated to ``datasette.yaml``. To avoid breaking these installations, Datasette will silently treat table configuration, plugin configuration and allow blocks in metadata as if they had been specified in configuration instead. (:issue:`2247`) (:issue:`2248`) (:issue:`2249`)

Note that the ``datasette publish`` command has not yet been updated to accept a ``datasette.yaml`` configuration file. This will be addressed in :issue:`2195` but for the moment you can include those settings in ``metadata.yaml`` instead.

JavaScript plugins
~~~~~~~~~~~~~~~~~~

Datasette now includes a :ref:`JavaScript plugins mechanism <javascript_plugins>`, allowing JavaScript to customize Datasette in a way that can collaborate with other plugins.

This provides two initial hooks, with more to come in the future:

- :ref:`makeAboveTablePanelConfigs() <javascript_plugins_makeAboveTablePanelConfigs>` can add additional panels to the top of the table page.
- :ref:`makeColumnActions() <javascript_plugins_makeColumnActions>` can add additional actions to the column menu.

Thanks `Cameron Yick <https://github.com/hydrosquall>`__ for contributing this feature. (`#2052 <https://github.com/simonw/datasette/pull/2052>`__)

Plugin hooks
~~~~~~~~~~~~

- New :ref:`plugin_hook_jinja2_environment_from_request` plugin hook, which can be used to customize the current Jinja environment based on the incoming request. This can be used to modify the template lookup path based on the incoming request hostname, among other things. (:issue:`2225`)
- New :ref:`family of template slot plugin hooks <plugin_hook_slots>`: ``top_homepage``, ``top_database``, ``top_table``, ``top_row``, ``top_query``, ``top_canned_query``. Plugins can use these to provide additional HTML to be injected at the top of the corresponding pages. (:issue:`1191`)
- New :ref:`track_event() mechanism <plugin_event_tracking>` for plugins to emit and receive events when certain events occur within Datasette. (:issue:`2240`)

  - Plugins can register additional event classes using :ref:`plugin_hook_register_events`.
  - They can then trigger those events with the :ref:`datasette.track_event(event) <datasette_track_event>` internal method.
  - Plugins can subscribe to notifications of events using the :ref:`plugin_hook_track_event` plugin hook (see the sketch after this list).
  - Datasette core now emits ``login``, ``logout``, ``create-token``, ``create-table``, ``drop-table``, ``insert-rows``, ``upsert-rows``, ``update-row``, ``delete-row`` events, :ref:`documented here <events>`.

- New internal function for plugin authors: :ref:`database_execute_isolated_fn`, for creating a new SQLite connection, executing code and then closing that connection, all while preventing other code from writing to that particular database. This connection will not have the :ref:`prepare_connection() <plugin_hook_prepare_connection>` plugin hook executed against it, allowing plugins to perform actions that might otherwise be blocked by existing connection configuration. (:issue:`2218`)
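
A hedged sketch of a plugin subscribing to events - this assumes the documented hook signature and that event objects expose ``name`` and ``actor``:

.. code-block:: python

    from datasette import hookimpl


    @hookimpl
    def track_event(datasette, event):
        # Called for every event; print a simple audit line
        print(f"Event: {event.name}, actor: {event.actor}")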

Documentation
~~~~~~~~~~~~~

- Documentation describing :ref:`how to write tests that use signed actor cookies <testing_datasette_client>` using ``datasette.client.actor_cookie()``. (:issue:`1830`)
- Documentation on how to :ref:`register a plugin for the duration of a test <testing_plugins_register_in_test>`. (:issue:`2234`)
- The :ref:`configuration documentation <configuration>` now shows examples of both YAML and JSON for each setting.

Minor fixes
~~~~~~~~~~~

- Datasette no longer attempts to run SQL queries in parallel when rendering a table page, as this was leading to some rare crashing bugs. (:issue:`2189`)
- Fixed warning: ``DeprecationWarning: pkg_resources is deprecated as an API``. (:issue:`2057`)
- Fixed a bug where the ``?_extra=columns`` parameter returned an incorrectly shaped response. (:issue:`2230`)

.. _v0_64_6:

0.64.6 (2023-12-22)
-------------------

- Fixed a bug where CSV export with expanded labels could fail if a foreign key reference did not correctly resolve. (:issue:`2214`)

.. _v0_64_5:

0.64.5 (2023-10-08)
-------------------

- Dropped dependency on ``click-default-group-wheel``, which could cause a dependency conflict. (:issue:`2197`)

.. _v1_0_a7:

1.0a7 (2023-09-21)
------------------

- Fix for a crashing bug caused by viewing the table page for a named in-memory database. (:issue:`2189`)

.. _v0_64_4:

0.64.4 (2023-09-21)
-------------------

- Fix for a crashing bug caused by viewing the table page for a named in-memory database. (:issue:`2189`)

.. _v1_0_a6:

1.0a6 (2023-09-07)
------------------

- New plugin hook: :ref:`plugin_hook_actors_from_ids` and an internal method to accompany it, :ref:`datasette_actors_from_ids` (see the sketch after this list). This mechanism is intended to be used by plugins that may need to display the actor who was responsible for something managed by that plugin: they can now resolve the recorded IDs of actors into the full actor objects. (:issue:`2181`)
- ``DATASETTE_LOAD_PLUGINS`` environment variable for :ref:`controlling which plugins <plugins_datasette_load_plugins>` are loaded by Datasette. (:issue:`2164`)
- Datasette now checks if the user has permission to view a table linked to by a foreign key before turning that foreign key into a clickable link. (:issue:`2178`)
- The ``execute-sql`` permission now implies that the actor can also view the database and instance. (:issue:`2169`)
- Documentation describing a pattern for building plugins that themselves :ref:`define further hooks <writing_plugins_extra_hooks>` for other plugins. (:issue:`1765`)
- Datasette is now tested against the Python 3.12 preview. (`#2175 <https://github.com/simonw/datasette/pull/2175>`__)
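
A hedged sketch of the ``actors_from_ids()`` method mentioned above - this assumes an async context and that the method returns a dictionary mapping actor IDs to actor dictionaries:

.. code-block:: python

    actors = await datasette.actors_from_ids(["ann", "bob"])
    # e.g. {"ann": {"id": "ann", "name": "Ann"}, "bob": {"id": "bob", "name": "Bob"}}
    for actor_id, actor in actors.items():
        print(actor_id, actor.get("name"))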

.. _v1_0_a5:

1.0a5 (2023-08-29)
------------------

- When restrictions are applied to :ref:`API tokens <CreateTokenView>`, those restrictions now behave slightly differently: applying the ``view-table`` restriction will imply the ability to ``view-database`` for the database containing that table, and both ``view-table`` and ``view-database`` will imply ``view-instance``. Previously you needed to create a token with restrictions that explicitly listed ``view-instance`` and ``view-database`` and ``view-table`` in order to view a table without getting a permission denied error. (:issue:`2102`)
- New ``datasette.yaml`` (or ``.json``) configuration file, which can be specified using ``datasette -c path-to-file``. The goal here is to consolidate settings, plugin configuration, permissions, canned queries, and other Datasette configuration into a single file, separate from ``metadata.yaml``. The legacy ``settings.json`` config file used for :ref:`config_dir` has been removed, and ``datasette.yaml`` has a ``"settings"`` section where the same settings key/value pairs can be included. In a future alpha release, more configuration such as plugins/permissions/canned queries will be moved to the ``datasette.yaml`` file. See :issue:`2093` for more details. Thanks, Alex Garcia.
- The ``-s/--setting`` option can now take dotted paths to nested settings. These will then be used to set or over-ride the same options as are present in the new configuration file. (:issue:`2156`)
- New ``--actor '{"id": "json-goes-here"}'`` option for use with ``datasette --get`` to treat the simulated request as being made by a specific actor, see :ref:`cli_datasette_get`. (:issue:`2153`)
- The Datasette ``_internal`` database has had some changes. It no longer shows up in the ``datasette.databases`` list by default, and is now instead available to plugins using the ``datasette.get_internal_database()`` method. Plugins are invited to use this as a private database to store configuration, settings and secrets that should not be made visible through the default Datasette interface. Users can pass the new ``--internal internal.db`` option to persist that internal database to disk. Thanks, Alex Garcia. (:issue:`2157`)
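
A speculative sketch of a plugin using that internal database to persist its own state - this assumes the ``startup()`` plugin hook and the documented ``get_internal_database()`` method; the table name is a placeholder:

.. code-block:: python

    from datasette import hookimpl


    @hookimpl
    def startup(datasette):
        async def inner():
            internal_db = datasette.get_internal_database()
            await internal_db.execute_write(
                "create table if not exists my_plugin_state (key text primary key, value text)"
            )

        return inner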

.. _v1_0_a4:

1.0a4 (2023-08-21)
------------------

This alpha fixes a security issue with the ``/-/api`` API explorer. On authenticated Datasette instances (instances protected using plugins such as `datasette-auth-passwords <https://datasette.io/plugins/datasette-auth-passwords>`__) the API explorer interface could reveal the names of databases and tables within the protected instance. The data stored in those tables was not revealed.

For more information and workarounds, read `the security advisory <https://github.com/simonw/datasette/security/advisories/GHSA-7ch3-7pp7-7cpq>`__. The issue has been present in every previous alpha version of Datasette 1.0: versions 1.0a0, 1.0a1, 1.0a2 and 1.0a3.

Also in this alpha:

- The new ``datasette plugins --requirements`` option outputs a list of currently installed plugins in Python ``requirements.txt`` format, useful for duplicating that installation elsewhere. (:issue:`2133`)
- :ref:`canned_queries_writable` can now define an ``on_success_message_sql`` field in their configuration, containing a SQL query that should be executed upon successful completion of the write operation in order to generate a message to be shown to the user. (:issue:`2138`)
- The automatically generated border color for a database is now shown in more places around the application. (:issue:`2119`)
- Every instance of example shell script code in the documentation should now include a working copy button, free from additional syntax. (:issue:`2140`)

.. _v1_0_a3:

1.0a3 (2023-08-09)
------------------

This alpha release previews the updated design for Datasette's default JSON API. (:issue:`782`)

The new :ref:`default JSON representation <json_api_default>` for both table pages (``/dbname/table.json``) and arbitrary SQL queries (``/dbname.json?sql=...``) is now shaped like this:

.. code-block:: json

    {
        "ok": true,
        "rows": [
            {
                "id": 3,
                "name": "Detroit"
            },
            {
                "id": 2,
                "name": "Los Angeles"
            },
            {
                "id": 4,
                "name": "Memnonia"
            },
            {
                "id": 1,
                "name": "San Francisco"
            }
        ],
        "truncated": false
    }

Tables will include an additional ``"next"`` key for pagination, which can be passed to ``?_next=`` to fetch the next page of results.

The various ``?_shape=`` options continue to work as before - see :ref:`json_api_shapes` for details.

A new ``?_extra=`` mechanism is available for tables, but has not yet been stabilized or documented. Details on that are available in :issue:`262`.

Smaller changes
~~~~~~~~~~~~~~~

- Datasette documentation now shows YAML examples for :ref:`metadata` by default, with a tab interface for switching to JSON. (:issue:`1153`)
- :ref:`plugin_register_output_renderer` plugins now have access to ``error`` and ``truncated`` arguments, allowing them to display error messages and take into account truncated results. (:issue:`2130`)
- The ``render_cell()`` plugin hook now also supports an optional ``request`` argument. (:issue:`2007`)
- New ``Justfile`` to support development workflows for Datasette using `Just <https://github.com/casey/just>`__.
- ``datasette.render_template()`` can now accept a ``datasette.views.Context`` subclass as an alternative to a dictionary. (:issue:`2127`)
- ``datasette install -e path`` option for editable installations, useful while developing plugins. (:issue:`2106`)
- When started with the ``--cors`` option Datasette now serves an ``Access-Control-Max-Age: 3600`` header, ensuring CORS OPTIONS requests are repeated no more than once an hour. (:issue:`2079`)
- Fixed a bug where the ``_internal`` database could display ``None`` instead of ``null`` for in-memory databases. (:issue:`1970`)

.. _v0_64_2:

0.64.2 (2023-03-08)
-------------------

- Fixed a bug with ``datasette publish cloudrun`` where deploys all used the same Docker image tag. This was mostly inconsequential as the service is deployed as soon as the image has been pushed to the registry, but could result in the incorrect image being deployed if two different deploys for two separate services ran at exactly the same time. (:issue:`2036`)

.. _v0_64_1:

0.64.1 (2023-01-11)
-------------------

- Documentation now links to a current source of information for installing Python 3. (:issue:`1987`)
- Incorrectly calling the Datasette constructor using ``Datasette("path/to/data.db")`` instead of ``Datasette(["path/to/data.db"])`` now returns a useful error message. (:issue:`1985`)

.. _v0_64:

0.64 (2023-01-09)
-----------------

- Datasette now **strongly recommends against allowing arbitrary SQL queries if you are using SpatiaLite**. SpatiaLite includes SQL functions that could cause the Datasette server to crash. See :ref:`spatialite` for more details.

- New :ref:`setting_default_allow_sql` setting, providing an easier way to disable all arbitrary SQL execution by end users: ``datasette --setting default_allow_sql off``. See also :ref:`authentication_permissions_execute_sql`. (:issue:`1409`)
- `Building a location to time zone API with SpatiaLite <https://datasette.io/tutorials/spatialite>`__ is a new Datasette tutorial showing how to safely use SpatiaLite to create a location to time zone API.
- New documentation about :ref:`how to debug problems loading SQLite extensions <installation_extensions>`. The error message shown when an extension cannot be loaded has also been improved. (:issue:`1979`)
- Fixed an accessibility issue: the ``<select>`` elements in the table filter form now show an outline when they are currently focused. (:issue:`1771`)

.. _v0_63_3:

0.63.3 (2022-12-17)
-------------------

- Fixed a bug where ``datasette --root``, when running in Docker, would only output the URL to sign in as root when the server shut down, not when it started up. (:issue:`1958`)
- You no longer need to ensure ``await datasette.invoke_startup()`` has been called in order for Datasette to start correctly serving requests - this is now handled automatically the first time the server receives a request. This fixes a bug experienced when Datasette is served directly by an ASGI application server such as Uvicorn or Gunicorn. It also fixes a bug with the `datasette-gunicorn <https://datasette.io/plugins/datasette-gunicorn>`__ plugin. (:issue:`1955`)

.. _v1_0_a2:

1.0a2 (2022-12-14)
------------------

The third Datasette 1.0 alpha release adds upsert support to the JSON API, plus the ability to specify finely grained permissions when creating an API token.

See `Datasette 1.0a2: Upserts and finely grained permissions <https://simonwillison.net/2022/Dec/15/datasette-1a2/>`__ for an extended, annotated version of these release notes.

- New ``/db/table/-/upsert`` API, :ref:`documented here <TableUpsertView>`. An upsert is an update-or-insert: existing rows will have specified keys updated, but if no row matches the incoming primary key a brand new row will be inserted instead (see the sketch after this list). (:issue:`1878`)
- New ``register_permissions()`` plugin hook. Plugins can now register named permissions, which will then be listed in various interfaces that show available permissions. (:issue:`1940`)
- The ``/db/-/create`` API for :ref:`creating a table <TableCreateView>` now accepts ``"ignore": true`` and ``"replace": true`` options when called with the ``"rows"`` property that creates a new table based on an example set of rows. This means the API can be called multiple times with different rows, setting rules for what should happen if a primary key collides with an existing row. (:issue:`1927`)
- Arbitrary permissions can now be configured at the instance, database and resource (table, SQL view or canned query) level in Datasette's :ref:`metadata` JSON and YAML files. The new ``"permissions"`` key can be used to specify which actors should have which permissions. See :ref:`authentication_permissions_other` for details. (:issue:`1636`)
- The ``/-/create-token`` page can now be used to create API tokens which are restricted to just a subset of actions, including against specific databases or resources. See :ref:`CreateTokenView` for details. (:issue:`1947`)
- Likewise, the ``datasette create-token`` CLI command can now create tokens with :ref:`a subset of permissions <authentication_cli_create_token_restrict>`. (:issue:`1855`)
- New :ref:`datasette.create_token() API method <datasette_create_token>` for programmatically creating signed API tokens. (:issue:`1951`)
- The ``/db/-/create`` API now requires the actor to have the ``insert-row`` permission in order to use the ``"row"`` or ``"rows"`` properties. (:issue:`1937`)
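
A hedged sketch of calling the new upsert endpoint - the database, table, rows and token below are placeholders:

.. code-block:: python

    import httpx

    response = httpx.post(
        "http://localhost:8001/data/docs/-/upsert",
        json={"rows": [{"id": 1, "title": "Updated title"}]},
        headers={"Authorization": "Bearer xxx"},
    )
    # The row with id=1 is updated if it exists, inserted if it does not
    print(response.json())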

.. _v1_0_a1:

1.0a1 (2022-12-01)
------------------

@@ -664,11 +105,11 @@ Documentation

.. _v0_62:

0.62 (2022-08-14)
-----------------
-------------------

Datasette can now run entirely in your browser using WebAssembly. Try out `Datasette Lite <https://lite.datasette.io/>`__, take a look `at the code <https://github.com/simonw/datasette-lite>`__ or read more about it in `Datasette Lite: a server-side Python web application running in a browser <https://simonwillison.net/2022/May/4/datasette-lite/>`__.

Datasette now has a `Discord community <https://datasette.io/discord>`__ for questions and discussions about Datasette and its ecosystem of projects.
Datasette now has a `Discord community <https://discord.gg/ktd74dm5mw>`__ for questions and discussions about Datasette and its ecosystem of projects.

Features
~~~~~~~~

@@ -730,7 +171,7 @@ Datasette also now requires Python 3.7 or higher.

- Datasette is now covered by a `Code of Conduct <https://github.com/simonw/datasette/blob/main/CODE_OF_CONDUCT.md>`__. (:issue:`1654`)
- Python 3.6 is no longer supported. (:issue:`1577`)
- Tests now run against Python 3.11-dev. (:issue:`1621`)
- New ``datasette.ensure_permissions(actor, permissions)`` internal method for checking multiple permissions at once. (:issue:`1675`)
- New :ref:`datasette.ensure_permissions(actor, permissions) <datasette_ensure_permissions>` internal method for checking multiple permissions at once. (:issue:`1675`)
- New :ref:`datasette.check_visibility(actor, action, resource=None) <datasette_check_visibility>` internal method for checking if a user can see a resource that would otherwise be invisible to unauthenticated users. (:issue:`1678`)
- Table and row HTML pages now include a ``<link rel="alternate" type="application/json+datasette" href="...">`` element and return a ``Link: URL; rel="alternate"; type="application/json+datasette"`` HTTP header pointing to the JSON version of those pages. (:issue:`1533`)
- ``Access-Control-Expose-Headers: Link`` is now added to the CORS headers, allowing remote JavaScript to access that header.

@@ -870,7 +311,7 @@ Other small fixes

- New ``datasette --uds /tmp/datasette.sock`` option for binding Datasette to a Unix domain socket, see :ref:`proxy documentation <deploying_proxy>` (:issue:`1388`)
- ``"searchmode": "raw"`` table metadata option for defaulting a table to executing SQLite full-text search syntax without first escaping it, see :ref:`full_text_search_advanced_queries`. (:issue:`1389`)
- New plugin hook: ``get_metadata()``, for returning custom metadata for an instance, database or table. Thanks, Brandon Roberts! (:issue:`1384`)
- New plugin hook: :ref:`plugin_hook_get_metadata`, for returning custom metadata for an instance, database or table. Thanks, Brandon Roberts! (:issue:`1384`)
- New plugin hook: :ref:`plugin_hook_skip_csrf`, for opting out of CSRF protection based on the incoming request. (:issue:`1377`)
- The :ref:`menu_links() <plugin_hook_menu_links>`, :ref:`table_actions() <plugin_hook_table_actions>` and :ref:`database_actions() <plugin_hook_database_actions>` plugin hooks all gained a new optional ``request`` argument providing access to the current request. (:issue:`1371`)
- Major performance improvement for Datasette faceting. (:issue:`1394`)

@@ -998,7 +439,7 @@ JavaScript modules

To use modules, JavaScript needs to be included in ``<script>`` tags with a ``type="module"`` attribute.

Datasette now has the ability to output ``<script type="module">`` in places where you may wish to take advantage of modules. The ``extra_js_urls`` option described in :ref:`configuration_reference_css_js` can now be used with modules, and module support is also available for the :ref:`extra_body_script() <plugin_hook_extra_body_script>` plugin hook. (:issue:`1186`, :issue:`1187`)
Datasette now has the ability to output ``<script type="module">`` in places where you may wish to take advantage of modules. The ``extra_js_urls`` option described in :ref:`customization_css_and_javascript` can now be used with modules, and module support is also available for the :ref:`extra_body_script() <plugin_hook_extra_body_script>` plugin hook. (:issue:`1186`, :issue:`1187`)

`datasette-leaflet-freedraw <https://datasette.io/plugins/datasette-leaflet-freedraw>`__ is the first example of a Datasette plugin that takes advantage of the new support for JavaScript modules. See `Drawing shapes on a map to query a SpatiaLite database <https://simonwillison.net/2021/Jan/24/drawing-shapes-spatialite/>`__ for more on this plugin.

@@ -1155,7 +596,7 @@ Smaller changes

~~~~~~~~~~~~~~~

- Wide tables shown within Datasette now scroll horizontally (:issue:`998`). This is achieved using a new ``<div class="table-wrapper">`` element which may impact the implementation of some plugins (for example `this change to datasette-cluster-map <https://github.com/simonw/datasette-cluster-map/commit/fcb4abbe7df9071c5ab57defd39147de7145b34e>`__).
- New :ref:`actions_debug_menu` permission. (:issue:`1068`)
- New :ref:`permissions_debug_menu` permission. (:issue:`1068`)
- Removed ``--debug`` option, which didn't do anything. (:issue:`814`)
- ``Link:`` HTTP header pagination. (:issue:`1014`)
- ``x`` button for clearing filters. (:issue:`1016`)

@@ -1379,10 +820,7 @@ Prior to this release the Datasette ecosystem has treated authentication as excl

You'll need to install plugins if you want full user accounts, but default Datasette can now authenticate a single root user with the new ``--root`` command-line option, which outputs a one-time use URL to :ref:`authenticate as a root actor <authentication_root>` (:issue:`784`)::

    datasette fixtures.db --root

::

    $ datasette fixtures.db --root
    http://127.0.0.1:8001/-/auth-token?token=5b632f8cd44b868df625f5a6e2185d88eea5b22237fd3cc8773f107cc4fd6477
    INFO: Started server process [14973]
    INFO: Waiting for application startup.

@@ -1414,7 +852,7 @@ You can use the new ``"allow"`` block syntax in ``metadata.json`` (or ``metadata

See :ref:`authentication_permissions_allow` for more details.

Plugins can implement their own custom permission checks using the new ``plugin_hook_permission_allowed()`` plugin hook.
Plugins can implement their own custom permission checks using the new :ref:`plugin_hook_permission_allowed` hook.

A new debug page at ``/-/permissions`` shows recent permission checks, to help administrators and plugin authors understand exactly what checks are being performed. This tool defaults to only being available to the root user, but can be exposed to other users by plugins that respond to the ``permissions-debug`` permission. (:issue:`788`)

@@ -1490,7 +928,7 @@ Smaller changes

- New :ref:`datasette.get_database() <datasette_get_database>` method.
- Added ``_`` prefix to many private, undocumented methods of the Datasette class. (:issue:`576`)
- Removed the ``db.get_outbound_foreign_keys()`` method which duplicated the behaviour of ``db.foreign_keys_for_table()``.
- New ``await datasette.permission_allowed()`` method.
- New :ref:`await datasette.permission_allowed() <datasette_permission_allowed>` method.
- ``/-/actor`` debugging endpoint for viewing the currently authenticated actor.
- New ``request.cookies`` property.
- ``/-/plugins`` endpoint now shows a list of hooks implemented by each plugin, e.g. https://latest.datasette.io/-/plugins?all=1

@@ -1553,7 +991,7 @@ You can now create :ref:`custom pages <custom_pages>` within your Datasette inst

:ref:`config_dir` (:issue:`731`) allows you to define a custom Datasette instance as a directory. So instead of running the following::

    datasette one.db two.db \
    $ datasette one.db two.db \
      --metadata=metadata.json \
      --template-dir=templates/ \
      --plugins-dir=plugins \

@@ -1561,7 +999,7 @@ You can now create :ref:`custom pages <custom_pages>` within your Datasette inst

You can instead arrange your files in a single directory called ``my-project`` and run this::

    datasette my-project/
    $ datasette my-project/

Also in this release:

@@ -1578,7 +1016,7 @@ Also in this release:

0.40 (2020-04-21)
-----------------

* Datasette :ref:`metadata` can now be provided as a YAML file as an optional alternative to JSON. (:issue:`713`)
* Datasette :ref:`metadata` can now be provided as a YAML file as an optional alternative to JSON. See :ref:`metadata_yaml`. (:issue:`713`)
* Removed support for ``datasette publish now``, which used the now-retired Zeit Now v1 hosting platform. A new plugin, `datasette-publish-now <https://github.com/simonw/datasette-publish-now>`__, can be installed to publish data to Zeit (`now Vercel <https://vercel.com/blog/zeit-is-now-vercel>`__) Now v2. (:issue:`710`)
* Fixed a bug where the ``extra_template_vars(request, view_name)`` plugin hook was not receiving the correct ``view_name``. (:issue:`716`)
* Variables added to the template context by the ``extra_template_vars()`` plugin hook are now shown in the ``?_context=1`` debugging mode (see :ref:`setting_template_debug`). (:issue:`693`)

@@ -2233,10 +1671,7 @@ In addition to the work on facets:

Added new help section::

    datasette --help-config

::

    $ datasette --help-config
    Config options:
      default_page_size            Default page size for the table view
                                   (default=100)

@@ -112,19 +112,16 @@ Once started you can access it at ``http://localhost:8001``
  --static MOUNT:DIRECTORY  Serve static files from this directory at
                            /MOUNT/...
  --memory                  Make /_memory database available
  -c, --config FILENAME     Path to JSON/YAML Datasette configuration file
  -s, --setting SETTING...  nested.key, value setting to use in Datasette
                            configuration
  --config CONFIG           Deprecated: set config option using
                            configname:value. Use --setting instead.
  --setting SETTING...      Setting, see
                            docs.datasette.io/en/stable/settings.html
  --secret TEXT             Secret used for signing secure values, such as
                            signed cookies
  --root                    Output URL that sets a cookie authenticating
                            the root user
  --default-deny            Deny all permissions by default
  --get TEXT                Run an HTTP GET request against this path,
                            print results and exit
  --headers                 Include HTTP headers in --get output
  --token TEXT              API token to send with --get requests
  --actor TEXT              Actor to use for --get requests (JSON string)
  --version-note TEXT       Additional note to show on /-/versions
  --help-settings           Show available settings
  --pdb                     Launch debugger on any errors

@@ -136,24 +133,11 @@ Once started you can access it at ``http://localhost:8001``
                            mode
  --ssl-keyfile TEXT        SSL key file
  --ssl-certfile TEXT       SSL certificate file
  --internal PATH           Path to a persistent Datasette internal SQLite
                            database
  --help                    Show this message and exit.

.. [[[end]]]

.. _cli_datasette_serve_env:

Environment variables
---------------------

Some of the ``datasette serve`` options can be provided by environment variables:

- ``DATASETTE_SECRET``: Equivalent to the ``--secret`` option.
- ``DATASETTE_SSL_KEYFILE``: Equivalent to the ``--ssl-keyfile`` option.
- ``DATASETTE_SSL_CERTFILE``: Equivalent to the ``--ssl-certfile`` option.
- ``DATASETTE_LOAD_EXTENSION``: Equivalent to the ``--load-extension`` option.

.. _cli_datasette_get:

@@ -164,14 +148,9 @@ The ``--get`` option to ``datasette serve`` (or just ``datasette``) specifies th

This means that all of Datasette's functionality can be accessed directly from the command-line.

For example:

.. code-block:: bash

    datasette --get '/-/versions.json' | jq .

.. code-block:: json

For example::

    $ datasette --get '/-/versions.json' | jq .
    {
        "python": {
            "version": "3.8.5",

@@ -210,15 +189,7 @@ For example:
        }
    }

You can use the ``--token TOKEN`` option to send an :ref:`API token <CreateTokenView>` with the simulated request.

Or you can make a request as a specific actor by passing a JSON representation of that actor to ``--actor``:

.. code-block:: bash

    datasette --memory --actor '{"id": "root"}' --get '/-/actor.json'

The exit code of ``datasette --get`` will be 0 if the request succeeds and 1 if the request produced an HTTP status code other than 200 - e.g. a 404 or 500 error.
The exit code will be 0 if the request succeeds and 1 if the request produced an HTTP status code other than 200 - e.g. a 404 or 500 error.

This lets you use ``datasette --get /`` to run tests against a Datasette application in a continuous integration environment such as GitHub Actions.

@@ -260,8 +231,6 @@ These can be passed to ``datasette serve`` using ``datasette serve --setting nam
                           database files (default=True)
  allow_signed_tokens      Allow users to create and use signed API tokens
                           (default=True)
  default_allow_sql        Allow anyone to run arbitrary SQL queries
                           (default=True)
  max_signed_tokens_ttl    Maximum allowed expiry time for signed API tokens
                           (default=0)
  suggest_facets           Calculate and display suggested facets

@@ -308,7 +277,6 @@ Output JSON showing all currently installed plugins, their versions, whether the

Options:
  --all                    Include built-in default plugins
  --requirements           Output requirements.txt of installed plugins
  --plugins-dir DIRECTORY  Path to directory containing custom plugins
  --help                   Show this message and exit.

@@ -372,15 +340,13 @@ Would install the `datasette-cluster-map <https://datasette.io/plugins/datasette

::

    Usage: datasette install [OPTIONS] [PACKAGES]...
    Usage: datasette install [OPTIONS] PACKAGES...

      Install plugins and packages from PyPI into the same environment as Datasette

    Options:
      -U, --upgrade            Upgrade packages to latest version
      -r, --requirement PATH   Install from requirements file
      -e, --editable TEXT      Install a project in editable mode from this path
      --help                   Show this message and exit.
      -U, --upgrade  Upgrade packages to latest version
      --help         Show this message and exit.

.. [[[end]]]

@@ -490,15 +456,8 @@ See :ref:`publish_cloud_run`.
  --cpu [1|2|4]                Number of vCPUs to allocate in Cloud Run
  --timeout INTEGER            Build timeout in seconds
  --apt-get-install TEXT       Additional packages to apt-get install
  --max-instances INTEGER      Maximum Cloud Run instances (use 0 to remove
                               the limit)  [default: 1]
  --max-instances INTEGER      Maximum Cloud Run instances
  --min-instances INTEGER      Minimum Cloud Run instances
  --artifact-repository TEXT   Artifact Registry repository to store the
                               image  [default: datasette]
  --artifact-region TEXT       Artifact Registry location (region or multi-
                               region)  [default: us]
  --artifact-project TEXT      Project ID for Artifact Registry (defaults to
                               the active project)
  --help                       Show this message and exit.

@@ -658,42 +617,12 @@ Create a signed API token, see :ref:`authentication_cli_create_token`.

    Create a signed API token for the specified actor ID

    Example:

        datasette create-token root --secret mysecret

    To allow only "view-database-download" for all databases:

        datasette create-token root --secret mysecret \
            --all view-database-download

    To allow "create-table" against a specific database:

        datasette create-token root --secret mysecret \
            --database mydb create-table

    To allow "insert-row" against a specific table:

        datasette create-token root --secret mysecret \
            --resource mydb mytable insert-row

    Restricted actions can be specified multiple times using multiple --all,
    --database, and --resource options.

    Add --debug to see a decoded version of the token.

    Options:
      --secret TEXT                Secret used for signing the API tokens
                                   [required]
      -e, --expires-after INTEGER  Token should expire after this many seconds
      -a, --all ACTION             Restrict token to this action
      -d, --database DB ACTION     Restrict token to this action on this database
      -r, --resource DB RESOURCE ACTION
                                   Restrict token to this action on this database
                                   resource (a table, SQL view or named query)
      --debug                      Show decoded token
      --plugins-dir DIRECTORY      Path to directory containing custom plugins
      --help                       Show this message and exit.
      --secret TEXT                Secret used for signing the API tokens
                                   [required]
      -e, --expires-after INTEGER  Token should expire after this many seconds
      --debug                      Show decoded token
      --help                       Show this message and exit.

.. [[[end]]]

@@ -1,6 +1 @@
alls
fo
ro
te
ths
notin