diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 88bb03b1..b969c4c1 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -5,7 +5,9 @@ updates: schedule: interval: daily time: "13:00" - groups: - python-packages: - patterns: - - "*" + open-pull-requests-limit: 10 + ignore: + - dependency-name: black + versions: + - 21.4b0 + - 21.4b1 diff --git a/.github/workflows/deploy-branch-preview.yml b/.github/workflows/deploy-branch-preview.yml deleted file mode 100644 index e56d9c27..00000000 --- a/.github/workflows/deploy-branch-preview.yml +++ /dev/null @@ -1,35 +0,0 @@ -name: Deploy a Datasette branch preview to Vercel - -on: - workflow_dispatch: - inputs: - branch: - description: "Branch to deploy" - required: true - type: string - -jobs: - deploy-branch-preview: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - name: Set up Python 3.11 - uses: actions/setup-python@v6 - with: - python-version: "3.11" - - name: Install dependencies - run: | - pip install datasette-publish-vercel - - name: Deploy the preview - env: - VERCEL_TOKEN: ${{ secrets.BRANCH_PREVIEW_VERCEL_TOKEN }} - run: | - export BRANCH="${{ github.event.inputs.branch }}" - wget https://latest.datasette.io/fixtures.db - datasette publish vercel fixtures.db \ - --branch $BRANCH \ - --project "datasette-preview-$BRANCH" \ - --token $VERCEL_TOKEN \ - --scope datasette \ - --about "Preview of $BRANCH" \ - --about_url "https://github.com/simonw/datasette/tree/$BRANCH" diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml index 9f53b01e..d6baa2f6 100644 --- a/.github/workflows/deploy-latest.yml +++ b/.github/workflows/deploy-latest.yml @@ -1,11 +1,10 @@ name: Deploy latest.datasette.io on: - workflow_dispatch: push: branches: - main - # - 1.0-dev + - 1.0-dev permissions: contents: read @@ -15,12 +14,18 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out datasette - uses: actions/checkout@v5 + uses: actions/checkout@v3 - name: Set up Python - uses: actions/setup-python@v6 + uses: actions/setup-python@v4 with: - python-version: "3.13" - cache: pip + python-version: "3.9" + - uses: actions/cache@v3 + name: Configure pip caching + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }} + restore-keys: | + ${{ runner.os }}-pip- - name: Install Python dependencies run: | python -m pip install --upgrade pip @@ -32,19 +37,13 @@ jobs: run: | pytest -n auto -m "not serial" pytest -m "serial" - - name: Build fixtures.db and other files needed to deploy the demo - run: |- - python tests/fixtures.py \ - fixtures.db \ - fixtures-config.json \ - fixtures-metadata.json \ - plugins \ - --extra-db-filename extra_database.db + - name: Build fixtures.db + run: python tests/fixtures.py fixtures.db fixtures.json plugins --extra-db-filename extra_database.db - name: Build docs.db if: ${{ github.ref == 'refs/heads/main' }} run: |- cd docs - DISABLE_SPHINX_INLINE_TABS=1 sphinx-build -b xml . _build + sphinx-build -b xml . _build sphinx-to-sqlite ../docs.db _build cd .. - name: Set up the alternate-route demo @@ -58,50 +57,19 @@ jobs: db.route = "alternative-route" ' > plugins/alternative_route.py cp fixtures.db fixtures2.db - - name: And the counters writable canned query demo + - name: Make some modifications to metadata.json run: | - cat > plugins/counters.py < metadata.json - # cat metadata.json - - id: auth - name: Authenticate to Google Cloud - uses: google-github-actions/auth@v3 + cat fixtures.json | \ + jq '.databases |= . 
+ {"ephemeral": {"allow": {"id": "*"}}}' | \ + jq '.plugins |= . + {"datasette-ephemeral-tables": {"table_ttl": 900}}' \ + > metadata.json + cat metadata.json + - name: Set up Cloud Run + uses: google-github-actions/setup-gcloud@v0 with: - credentials_json: ${{ secrets.GCP_SA_KEY }} - - name: Set up Cloud SDK - uses: google-github-actions/setup-gcloud@v3 + version: '318.0.0' + service_account_email: ${{ secrets.GCP_SA_EMAIL }} + service_account_key: ${{ secrets.GCP_SA_KEY }} - name: Deploy to Cloud Run env: LATEST_DATASETTE_SECRET: ${{ secrets.LATEST_DATASETTE_SECRET }} @@ -113,7 +81,7 @@ jobs: # Replace 1.0 with one-dot-zero in SUFFIX export SUFFIX=${SUFFIX//1.0/one-dot-zero} datasette publish cloudrun fixtures.db fixtures2.db extra_database.db \ - -m fixtures-metadata.json \ + -m metadata.json \ --plugins-dir=plugins \ --branch=$GITHUB_SHA \ --version-note=$GITHUB_SHA \ diff --git a/.github/workflows/prettier.yml b/.github/workflows/prettier.yml index 77cce7d1..ded41040 100644 --- a/.github/workflows/prettier.yml +++ b/.github/workflows/prettier.yml @@ -10,8 +10,8 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out repo - uses: actions/checkout@v4 - - uses: actions/cache@v4 + uses: actions/checkout@v2 + - uses: actions/cache@v2 name: Configure npm caching with: path: ~/.npm diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index e94d0bdd..fa608055 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -12,15 +12,20 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"] + python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v6 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - cache: pip - cache-dependency-path: pyproject.toml + - uses: actions/cache@v3 + name: Configure pip caching + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }} + restore-keys: | + ${{ runner.os }}-pip- - name: Install dependencies run: | pip install -e '.[test]' @@ -31,38 +36,47 @@ jobs: deploy: runs-on: ubuntu-latest needs: [test] - environment: release - permissions: - id-token: write steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v3 - name: Set up Python - uses: actions/setup-python@v6 + uses: actions/setup-python@v4 with: - python-version: '3.13' - cache: pip - cache-dependency-path: pyproject.toml + python-version: '3.11' + - uses: actions/cache@v3 + name: Configure pip caching + with: + path: ~/.cache/pip + key: ${{ runner.os }}-publish-pip-${{ hashFiles('**/setup.py') }} + restore-keys: | + ${{ runner.os }}-publish-pip- - name: Install dependencies run: | - pip install setuptools wheel build - - name: Build - run: | - python -m build + pip install setuptools wheel twine - name: Publish - uses: pypa/gh-action-pypi-publish@release/v1 + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} + run: | + python setup.py sdist bdist_wheel + twine upload dist/* deploy_static_docs: runs-on: ubuntu-latest needs: [deploy] if: "!github.event.release.prerelease" steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v2 - name: Set up Python - uses: actions/setup-python@v6 + uses: actions/setup-python@v2 with: python-version: '3.10' - cache: pip - cache-dependency-path: pyproject.toml + - uses: actions/cache@v2 + name: Configure pip caching + with: + 
path: ~/.cache/pip + key: ${{ runner.os }}-publish-pip-${{ hashFiles('**/setup.py') }} + restore-keys: | + ${{ runner.os }}-publish-pip- - name: Install dependencies run: | python -m pip install -e .[docs] @@ -70,16 +84,15 @@ jobs: - name: Build docs.db run: |- cd docs - DISABLE_SPHINX_INLINE_TABS=1 sphinx-build -b xml . _build + sphinx-build -b xml . _build sphinx-to-sqlite ../docs.db _build cd .. - - id: auth - name: Authenticate to Google Cloud - uses: google-github-actions/auth@v2 + - name: Set up Cloud Run + uses: google-github-actions/setup-gcloud@v0 with: - credentials_json: ${{ secrets.GCP_SA_KEY }} - - name: Set up Cloud SDK - uses: google-github-actions/setup-gcloud@v3 + version: '275.0.0' + service_account_email: ${{ secrets.GCP_SA_EMAIL }} + service_account_key: ${{ secrets.GCP_SA_KEY }} - name: Deploy stable-docs.datasette.io to Cloud Run run: |- gcloud config set run/region us-central1 @@ -92,7 +105,7 @@ jobs: needs: [deploy] if: "!github.event.release.prerelease" steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v2 - name: Build and push to Docker Hub env: DOCKER_USER: ${{ secrets.DOCKER_USER }} diff --git a/.github/workflows/spellcheck.yml b/.github/workflows/spellcheck.yml index 7c5370ce..a2621ecc 100644 --- a/.github/workflows/spellcheck.yml +++ b/.github/workflows/spellcheck.yml @@ -9,19 +9,22 @@ jobs: spellcheck: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v6 + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 with: - python-version: '3.11' - cache: 'pip' - cache-dependency-path: '**/pyproject.toml' + python-version: 3.9 + - uses: actions/cache@v2 + name: Configure pip caching + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }} + restore-keys: | + ${{ runner.os }}-pip- - name: Install dependencies run: | pip install -e '.[docs]' - name: Check spelling run: | - codespell README.md --ignore-words docs/codespell-ignore-words.txt codespell docs/*.rst --ignore-words docs/codespell-ignore-words.txt codespell datasette -S datasette/static --ignore-words docs/codespell-ignore-words.txt - codespell tests --ignore-words docs/codespell-ignore-words.txt diff --git a/.github/workflows/stable-docs.yml b/.github/workflows/stable-docs.yml deleted file mode 100644 index 3119d617..00000000 --- a/.github/workflows/stable-docs.yml +++ /dev/null @@ -1,76 +0,0 @@ -name: Update Stable Docs - -on: - release: - types: [published] - push: - branches: - - main - -permissions: - contents: write - -jobs: - update_stable_docs: - runs-on: ubuntu-latest - steps: - - name: Checkout repository - uses: actions/checkout@v5 - with: - fetch-depth: 0 # We need all commits to find docs/ changes - - name: Set up Git user - run: | - git config user.name "Automated" - git config user.email "actions@users.noreply.github.com" - - name: Create stable branch if it does not yet exist - run: | - if ! git ls-remote --heads origin stable | grep -qE '\bstable\b'; then - # Make sure we have all tags locally - git fetch --tags --quiet - - # Latest tag that is just numbers and dots (optionally prefixed with 'v') - # e.g., 0.65.2 or v0.65.2 — excludes 1.0a20, 1.0-rc1, etc. 
- LATEST_RELEASE=$( - git tag -l --sort=-v:refname \ - | grep -E '^v?[0-9]+(\.[0-9]+){1,3}$' \ - | head -n1 - ) - - git checkout -b stable - - # If there are any stable releases, copy docs/ from the most recent - if [ -n "$LATEST_RELEASE" ]; then - rm -rf docs/ - git checkout "$LATEST_RELEASE" -- docs/ || true - fi - - git commit -m "Populate docs/ from $LATEST_RELEASE" || echo "No changes" - git push -u origin stable - fi - - name: Handle Release - if: github.event_name == 'release' && !github.event.release.prerelease - run: | - git fetch --all - git checkout stable - git reset --hard ${GITHUB_REF#refs/tags/} - git push origin stable --force - - name: Handle Commit to Main - if: contains(github.event.head_commit.message, '!stable-docs') - run: | - git fetch origin - git checkout -b stable origin/stable - # Get the list of modified files in docs/ from the current commit - FILES=$(git diff-tree --no-commit-id --name-only -r ${{ github.sha }} -- docs/) - # Check if the list of files is non-empty - if [[ -n "$FILES" ]]; then - # Checkout those files to the stable branch to over-write with their contents - for FILE in $FILES; do - git checkout ${{ github.sha }} -- $FILE - done - git add docs/ - git commit -m "Doc changes from ${{ github.sha }}" - git push origin stable - else - echo "No changes to docs/ in this commit." - exit 0 - fi diff --git a/.github/workflows/test-coverage.yml b/.github/workflows/test-coverage.yml index 8d73b64d..bd720664 100644 --- a/.github/workflows/test-coverage.yml +++ b/.github/workflows/test-coverage.yml @@ -15,13 +15,18 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out datasette - uses: actions/checkout@v4 + uses: actions/checkout@v2 - name: Set up Python - uses: actions/setup-python@v6 + uses: actions/setup-python@v2 with: - python-version: '3.12' - cache: 'pip' - cache-dependency-path: '**/pyproject.toml' + python-version: 3.9 + - uses: actions/cache@v2 + name: Configure pip caching + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }} + restore-keys: | + ${{ runner.os }}-pip- - name: Install Python dependencies run: | python -m pip install --upgrade pip @@ -31,7 +36,7 @@ jobs: run: |- ls -lah cat .coveragerc - pytest -m "not serial" --cov=datasette --cov-config=.coveragerc --cov-report xml:coverage.xml --cov-report term -x + pytest --cov=datasette --cov-config=.coveragerc --cov-report xml:coverage.xml --cov-report term ls -lah - name: Upload coverage report uses: codecov/codecov-action@v1 diff --git a/.github/workflows/test-pyodide.yml b/.github/workflows/test-pyodide.yml index b490a9bf..bc9593a8 100644 --- a/.github/workflows/test-pyodide.yml +++ b/.github/workflows/test-pyodide.yml @@ -12,15 +12,15 @@ jobs: test: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v3 - name: Set up Python 3.10 - uses: actions/setup-python@v6 + uses: actions/setup-python@v3 with: python-version: "3.10" cache: 'pip' - cache-dependency-path: '**/pyproject.toml' + cache-dependency-path: '**/setup.py' - name: Cache Playwright browsers - uses: actions/cache@v4 + uses: actions/cache@v2 with: path: ~/.cache/ms-playwright/ key: ${{ runner.os }}-browsers diff --git a/.github/workflows/test-sqlite-support.yml b/.github/workflows/test-sqlite-support.yml deleted file mode 100644 index 76ea138a..00000000 --- a/.github/workflows/test-sqlite-support.yml +++ /dev/null @@ -1,53 +0,0 @@ -name: Test SQLite versions - -on: [push, pull_request] - -permissions: - contents: read - -jobs: - test: - runs-on: ${{ 
matrix.platform }} - continue-on-error: true - strategy: - matrix: - platform: [ubuntu-latest] - python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"] - sqlite-version: [ - #"3", # latest version - "3.46", - #"3.45", - #"3.27", - #"3.26", - "3.25", - #"3.25.3", # 2018-09-25, window functions breaks test_upsert for some reason on 3.10, skip for now - #"3.24", # 2018-06-04, added UPSERT support - #"3.23.1" # 2018-04-10, before UPSERT - ] - steps: - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v6 - with: - python-version: ${{ matrix.python-version }} - allow-prereleases: true - cache: pip - cache-dependency-path: pyproject.toml - - name: Set up SQLite ${{ matrix.sqlite-version }} - uses: asg017/sqlite-versions@71ea0de37ae739c33e447af91ba71dda8fcf22e6 - with: - version: ${{ matrix.sqlite-version }} - cflags: "-DSQLITE_ENABLE_DESERIALIZE -DSQLITE_ENABLE_FTS5 -DSQLITE_ENABLE_FTS4 -DSQLITE_ENABLE_FTS3_PARENTHESIS -DSQLITE_ENABLE_RTREE -DSQLITE_ENABLE_JSON1" - - run: python3 -c "import sqlite3; print(sqlite3.sqlite_version)" - - run: echo $LD_LIBRARY_PATH - - name: Build extension for --load-extension test - run: |- - (cd tests && gcc ext.c -fPIC -shared -o ext.so) - - name: Install dependencies - run: | - pip install -e '.[test]' - pip freeze - - name: Run tests - run: | - pytest -n auto -m "not serial" - pytest -m "serial" diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 1e5e03d2..886f649a 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -10,16 +10,20 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"] + python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v6 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - allow-prereleases: true - cache: pip - cache-dependency-path: pyproject.toml + - uses: actions/cache@v3 + name: Configure pip caching + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }} + restore-keys: | + ${{ runner.os }}-pip- - name: Build extension for --load-extension test run: |- (cd tests && gcc ext.c -fPIC -shared -o ext.so) @@ -31,13 +35,6 @@ jobs: run: | pytest -n auto -m "not serial" pytest -m "serial" - # And the test that exceeds a localhost HTTPS server - tests/test_datasette_https_server.sh - - name: Install docs dependencies - run: | - pip install -e '.[docs]' - - name: Black - run: black --check . 
- name: Check if cog needs to be run run: | cog --check docs/*.rst @@ -45,7 +42,3 @@ jobs: run: | # This fails on syntax errors, or a diff was applied blacken-docs -l 60 docs/*.rst - - name: Test DATASETTE_LOAD_PLUGINS - run: | - pip install datasette-init datasette-json-html - tests/test-datasette-load-plugins.sh diff --git a/.github/workflows/tmate.yml b/.github/workflows/tmate.yml index 123f6c71..9792245d 100644 --- a/.github/workflows/tmate.yml +++ b/.github/workflows/tmate.yml @@ -5,7 +5,6 @@ on: permissions: contents: read - models: read jobs: build: @@ -14,5 +13,3 @@ jobs: - uses: actions/checkout@v2 - name: Setup tmate session uses: mxschmitt/action-tmate@v3 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.gitignore b/.gitignore index 70e6bbeb..277ff653 100644 --- a/.gitignore +++ b/.gitignore @@ -5,9 +5,6 @@ scratchpad .vscode -uv.lock -data.db - # We don't use Pipfile, so ignore them Pipfile Pipfile.lock @@ -126,4 +123,4 @@ node_modules # include it in source control. tests/*.dylib tests/*.so -tests/*.dll +tests/*.dll \ No newline at end of file diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 5b30e75a..e157fb9c 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -3,7 +3,7 @@ version: 2 build: os: ubuntu-20.04 tools: - python: "3.11" + python: "3.9" sphinx: configuration: docs/conf.py diff --git a/Justfile b/Justfile deleted file mode 100644 index a47662c3..00000000 --- a/Justfile +++ /dev/null @@ -1,56 +0,0 @@ -export DATASETTE_SECRET := "not_a_secret" - -# Run tests and linters -@default: test lint - -# Setup project -@init: - uv sync --extra test --extra docs - -# Run pytest with supplied options -@test *options: init - uv run pytest -n auto {{options}} - -@codespell: - uv run codespell README.md --ignore-words docs/codespell-ignore-words.txt - uv run codespell docs/*.rst --ignore-words docs/codespell-ignore-words.txt - uv run codespell datasette -S datasette/static --ignore-words docs/codespell-ignore-words.txt - uv run codespell tests --ignore-words docs/codespell-ignore-words.txt - -# Run linters: black, flake8, mypy, cog -@lint: codespell - uv run black . --check - uv run flake8 - uv run --extra test cog --check README.md docs/*.rst - -# Rebuild docs with cog -@cog: - uv run --extra test cog -r README.md docs/*.rst - -# Serve live docs on localhost:8000 -@docs: cog blacken-docs - uv run --extra docs make -C docs livehtml - -# Build docs as static HTML -@docs-build: cog blacken-docs - rm -rf docs/_build && cd docs && uv run make html - -# Apply Black -@black: - uv run black . 
- -# Apply blacken-docs -@blacken-docs: - uv run blacken-docs -l 60 docs/*.rst - -# Apply prettier -@prettier: - npm run fix - -# Format code with both black and prettier -@format: black prettier blacken-docs - -@serve *options: - uv run sqlite-utils create-database data.db - uv run sqlite-utils create-table data.db docs id integer title text --pk id --ignore - uv run python -m datasette data.db --root --reload {{options}} diff --git a/README.md b/README.md index 393e8e5c..af95b85e 100644 --- a/README.md +++ b/README.md @@ -1,13 +1,13 @@ Datasette [![PyPI](https://img.shields.io/pypi/v/datasette.svg)](https://pypi.org/project/datasette/) -[![Changelog](https://img.shields.io/github/v/release/simonw/datasette?label=changelog)](https://docs.datasette.io/en/latest/changelog.html) +[![Changelog](https://img.shields.io/github/v/release/simonw/datasette?label=changelog)](https://docs.datasette.io/en/stable/changelog.html) [![Python 3.x](https://img.shields.io/pypi/pyversions/datasette.svg?logo=python&logoColor=white)](https://pypi.org/project/datasette/) [![Tests](https://github.com/simonw/datasette/workflows/Test/badge.svg)](https://github.com/simonw/datasette/actions?query=workflow%3ATest) [![Documentation Status](https://readthedocs.org/projects/datasette/badge/?version=latest)](https://docs.datasette.io/en/latest/?badge=latest) [![License](https://img.shields.io/badge/license-Apache%202.0-blue.svg)](https://github.com/simonw/datasette/blob/main/LICENSE) [![docker: datasette](https://img.shields.io/badge/docker-datasette-blue)](https://hub.docker.com/r/datasetteproject/datasette) -[![discord](https://img.shields.io/discord/823971286308356157?label=discord)](https://datasette.io/discord) +[![discord](https://img.shields.io/discord/823971286308356157?label=discord)](https://discord.gg/ktd74dm5mw) *An open source multi-tool for exploring and publishing data* @@ -15,14 +15,14 @@ Datasette is a tool for exploring and publishing data. It helps people take data Datasette is aimed at data journalists, museum curators, archivists, local governments, scientists, researchers and anyone else who has data that they wish to share with the world. -[Explore a demo](https://datasette.io/global-power-plants/global-power-plants), watch [a video about the project](https://simonwillison.net/2021/Feb/7/video/) or try it out [on GitHub Codespaces](https://github.com/datasette/datasette-studio). +[Explore a demo](https://global-power-plants.datasettes.com/global-power-plants/global-power-plants), watch [a video about the project](https://simonwillison.net/2021/Feb/7/video/) or try it out by [uploading and publishing your own CSV data](https://docs.datasette.io/en/stable/getting_started.html#try-datasette-without-installing-anything-using-glitch). * [datasette.io](https://datasette.io/) is the official project website * Latest [Datasette News](https://datasette.io/news) * Comprehensive documentation: https://docs.datasette.io/ * Examples: https://datasette.io/examples * Live demo of current `main` branch: https://latest.datasette.io/ -* Questions, feedback or want to talk about the project? Join our [Discord](https://datasette.io/discord) +* Questions, feedback or want to talk about the project? Join our [Discord](https://discord.gg/ktd74dm5mw) Want to stay up-to-date with the project? Subscribe to the [Datasette newsletter](https://datasette.substack.com/) for tips, tricks and news on what's new in the Datasette ecosystem. 
@@ -36,7 +36,7 @@ You can also install it using `pip` or `pipx`: pip install datasette -Datasette requires Python 3.8 or higher. We also have [detailed installation instructions](https://docs.datasette.io/en/stable/installation.html) covering other options such as Docker. +Datasette requires Python 3.7 or higher. We also have [detailed installation instructions](https://docs.datasette.io/en/stable/installation.html) covering other options such as Docker. ## Basic usage diff --git a/datasette/__init__.py b/datasette/__init__.py index 47d2b4f6..ea10c13d 100644 --- a/datasette/__init__.py +++ b/datasette/__init__.py @@ -1,8 +1,5 @@ -from datasette.permissions import Permission # noqa from datasette.version import __version_info__, __version__ # noqa -from datasette.events import Event # noqa from datasette.utils.asgi import Forbidden, NotFound, Request, Response # noqa from datasette.utils import actor_matches_allow # noqa -from datasette.views import Context # noqa from .hookspecs import hookimpl # noqa from .hookspecs import hookspec # noqa diff --git a/datasette/app.py b/datasette/app.py index b9955925..282c0984 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -1,49 +1,33 @@ -from __future__ import annotations - -from asgi_csrf import Errors import asyncio -import contextvars -from typing import TYPE_CHECKING, Any, Dict, Iterable, List - -if TYPE_CHECKING: - from datasette.permissions import AllowedResource, Resource +from typing import Sequence, Union, Tuple, Optional import asgi_csrf import collections -import dataclasses import datetime import functools import glob import hashlib import httpx -import importlib.metadata import inspect from itsdangerous import BadSignature import json import os +import pkg_resources import re import secrets import sys import threading -import time -import types import urllib.parse from concurrent import futures from pathlib import Path from markupsafe import Markup, escape from itsdangerous import URLSafeSerializer -from jinja2 import ( - ChoiceLoader, - Environment, - FileSystemLoader, - PrefixLoader, -) +from jinja2 import ChoiceLoader, Environment, FileSystemLoader, PrefixLoader from jinja2.environment import Template from jinja2.exceptions import TemplateNotFound -from .events import Event -from .views import Context -from .views.database import database_download, DatabaseView, TableCreateView, QueryView +from .views.base import ureg +from .views.database import DatabaseDownload, DatabaseView, TableCreateView from .views.index import IndexView from .views.special import ( JsonDataView, @@ -55,55 +39,37 @@ from .views.special import ( AllowDebugView, PermissionsDebugView, MessagesDebugView, - AllowedResourcesView, - PermissionRulesView, - PermissionCheckView, - TablesView, - InstanceSchemaView, - DatabaseSchemaView, - TableSchemaView, -) -from .views.table import ( - TableInsertView, - TableUpsertView, - TableDropView, - table_view, ) +from .views.table import TableView, TableInsertView, TableUpsertView, TableDropView from .views.row import RowView, RowDeleteView, RowUpdateView from .renderer import json_renderer from .url_builder import Urls from .database import Database, QueryInterrupted from .utils import ( - PaginatedResources, PrefixedUrlString, SPATIALITE_FUNCTIONS, StartupError, async_call_with_supported_arguments, await_me_maybe, - baseconv, call_with_supported_arguments, - detect_json1, display_actor, escape_css_string, escape_sqlite, find_spatialite, format_bytes, module_from_path, - move_plugins_and_allow, - move_table_config, 
parse_metadata, resolve_env_secrets, resolve_routes, tilde_decode, - tilde_encode, to_css_class, urlsafe_components, - redact_keys, row_sql_params_pks, ) from .utils.asgi import ( AsgiLifespan, + Base400, Forbidden, NotFound, DatabaseNotFound, @@ -111,10 +77,11 @@ from .utils.asgi import ( RowNotFound, Request, Response, - AsgiRunOnFirstRequest, asgi_static, asgi_send, asgi_send_file, + asgi_send_html, + asgi_send_json, asgi_send_redirect, ) from .utils.internal_db import init_internal_db, populate_schema_tables @@ -126,44 +93,11 @@ from .tracer import AsgiTracer from .plugins import pm, DEFAULT_PLUGINS, get_plugins from .version import __version__ -from .resources import DatabaseResource, TableResource - app_root = Path(__file__).parent.parent - -# Context variable to track when code is executing within a datasette.client request -_in_datasette_client = contextvars.ContextVar("in_datasette_client", default=False) - - -class _DatasetteClientContext: - """Context manager to mark code as executing within a datasette.client request.""" - - def __enter__(self): - self.token = _in_datasette_client.set(True) - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - _in_datasette_client.reset(self.token) - return False - - -@dataclasses.dataclass -class PermissionCheck: - """Represents a logged permission check for debugging purposes.""" - - when: str - actor: Dict[str, Any] | None - action: str - parent: str | None - child: str | None - result: bool - - # https://github.com/simonw/datasette/issues/283#issuecomment-781591015 SQLITE_LIMIT_ATTACHED = 10 -INTERNAL_DB_NAME = "__INTERNAL__" - Setting = collections.namedtuple("Setting", ("name", "default", "help")) SETTINGS = ( Setting("default_page_size", 100, "Default page size for the table view"), @@ -207,11 +141,6 @@ SETTINGS = ( True, "Allow users to create and use signed API tokens", ), - Setting( - "default_allow_sql", - True, - "Allow anyone to run arbitrary SQL queries", - ), Setting( "max_signed_tokens_ttl", 0, @@ -265,11 +194,6 @@ DEFAULT_SETTINGS = {option.name: option.default for option in SETTINGS} FAVICON_PATH = app_root / "datasette" / "static" / "favicon.png" -DEFAULT_NOT_SET = object() - - -ResourcesSQL = collections.namedtuple("ResourcesSQL", ("sql", "params")) - async def favicon(request, send): await asgi_send_file( @@ -286,13 +210,6 @@ ResolvedRow = collections.namedtuple( ) -def _to_string(value): - if isinstance(value, str): - return value - else: - return json.dumps(value, default=str) - - class Datasette: # Message constants: INFO = 1 @@ -306,7 +223,6 @@ class Datasette: cache_headers=True, cors=False, inspect_data=None, - config=None, metadata=None, sqlite_extensions=None, template_dir=None, @@ -320,8 +236,6 @@ class Datasette: pdb=False, crossdb=False, nolock=False, - internal=None, - default_deny=False, ): self._startup_invoked = False assert config_dir is None or isinstance( @@ -330,8 +244,6 @@ class Datasette: self.config_dir = config_dir self.pdb = pdb self._secret = secret or secrets.token_hex(32) - if files is not None and isinstance(files, str): - raise ValueError("files= must be a list of paths, not a string") self.files = tuple(files or []) + tuple(immutables or []) if config_dir: db_files = [] @@ -352,7 +264,6 @@ class Datasette: self.inspect_data = inspect_data self.immutables = set(immutables or []) self.databases = collections.OrderedDict() - self.actions = {} # .invoke_startup() will populate this try: self._refresh_schemas_lock = asyncio.Lock() except RuntimeError as rex: @@ -370,21 +281,19 @@ class 
Datasette: self.add_database( Database(self, is_mutable=False, is_memory=True), name="_memory" ) + # memory_name is a random string so that each Datasette instance gets its own + # unique in-memory named database - otherwise unit tests can fail with weird + # errors when different instances accidentally share an in-memory database + self.add_database( + Database(self, memory_name=secrets.token_hex()), name="_internal" + ) + self.internal_db_created = False for file in self.files: self.add_database( Database(self, file, is_mutable=file not in self.immutables) ) - - self.internal_db_created = False - if internal is None: - self._internal_database = Database(self, memory_name=secrets.token_hex()) - else: - self._internal_database = Database(self, path=internal, mode="rwc") - self._internal_database.name = INTERNAL_DB_NAME - self.cache_headers = cache_headers self.cors = cors - config_files = [] metadata_files = [] if config_dir: metadata_files = [ @@ -392,26 +301,9 @@ class Datasette: for filename in ("metadata.json", "metadata.yaml", "metadata.yml") if (config_dir / filename).exists() ] - config_files = [ - config_dir / filename - for filename in ("datasette.json", "datasette.yaml", "datasette.yml") - if (config_dir / filename).exists() - ] if config_dir and metadata_files and not metadata: with metadata_files[0].open() as fp: metadata = parse_metadata(fp.read()) - - if config_dir and config_files and not config: - with config_files[0].open() as fp: - config = parse_metadata(fp.read()) - - # Move any "plugins" and "allow" settings from metadata to config - updates them in place - metadata = metadata or {} - config = config or {} - metadata, config = move_plugins_and_allow(metadata, config) - # Now migrate any known table configuration settings over as well - metadata, config = move_table_config(metadata, config) - self._metadata_local = metadata or {} self.sqlite_extensions = [] for extension in sqlite_extensions or []: @@ -430,46 +322,17 @@ class Datasette: if config_dir and (config_dir / "static").is_dir() and not static_mounts: static_mounts = [("static", str((config_dir / "static").resolve()))] self.static_mounts = static_mounts or [] - if config_dir and (config_dir / "datasette.json").exists() and not config: - config = json.loads((config_dir / "datasette.json").read_text()) - - config = config or {} - config_settings = config.get("settings") or {} - - # Validate settings from config file - for key, value in config_settings.items(): - if key not in DEFAULT_SETTINGS: - raise StartupError(f"Invalid setting '{key}' in config file") - # Validate type matches expected type from DEFAULT_SETTINGS - if value is not None: # Allow None/null values - expected_type = type(DEFAULT_SETTINGS[key]) - actual_type = type(value) - if actual_type != expected_type: - raise StartupError( - f"Setting '{key}' in config file has incorrect type. " - f"Expected {expected_type.__name__}, got {actual_type.__name__}. " - f"Value: {value!r}. " - f"Hint: In YAML/JSON config files, remove quotes from boolean and integer values." 
- ) - - # Validate settings from constructor parameter - if settings: - for key, value in settings.items(): + if config_dir and (config_dir / "config.json").exists(): + raise StartupError("config.json should be renamed to settings.json") + if config_dir and (config_dir / "settings.json").exists() and not settings: + settings = json.loads((config_dir / "settings.json").read_text()) + # Validate those settings + for key in settings: if key not in DEFAULT_SETTINGS: - raise StartupError(f"Invalid setting '{key}' in settings parameter") - if value is not None: - expected_type = type(DEFAULT_SETTINGS[key]) - actual_type = type(value) - if actual_type != expected_type: - raise StartupError( - f"Setting '{key}' in settings parameter has incorrect type. " - f"Expected {expected_type.__name__}, got {actual_type.__name__}. " - f"Value: {value!r}" - ) - - self.config = config - # CLI settings should overwrite datasette.json settings - self._settings = dict(DEFAULT_SETTINGS, **(config_settings), **(settings or {})) + raise StartupError( + "Invalid setting '{}' in settings.json".format(key) + ) + self._settings = dict(DEFAULT_SETTINGS, **(settings or {})) self.renderers = {} # File extension -> (renderer, can_render) functions self.version_note = version_note if self.setting("num_sql_threads") == 0: @@ -515,79 +378,18 @@ class Datasette: ), ] ) - environment = Environment( - loader=template_loader, - autoescape=True, - enable_async=True, - # undefined=StrictUndefined, + self.jinja_env = Environment( + loader=template_loader, autoescape=True, enable_async=True ) - environment.filters["escape_css_string"] = escape_css_string - environment.filters["quote_plus"] = urllib.parse.quote_plus - self._jinja_env = environment - environment.filters["escape_sqlite"] = escape_sqlite - environment.filters["to_css_class"] = to_css_class + self.jinja_env.filters["escape_css_string"] = escape_css_string + self.jinja_env.filters["quote_plus"] = urllib.parse.quote_plus + self.jinja_env.filters["escape_sqlite"] = escape_sqlite + self.jinja_env.filters["to_css_class"] = to_css_class self._register_renderers() self._permission_checks = collections.deque(maxlen=200) self._root_token = secrets.token_hex(32) - self.root_enabled = False - self.default_deny = default_deny self.client = DatasetteClient(self) - async def apply_metadata_json(self): - # Apply any metadata entries from metadata.json to the internal tables - # step 1: top-level metadata - for key in self._metadata_local or {}: - if key == "databases": - continue - value = self._metadata_local[key] - await self.set_instance_metadata(key, _to_string(value)) - - # step 2: database-level metadata - for dbname, db in self._metadata_local.get("databases", {}).items(): - for key, value in db.items(): - if key in ("tables", "queries"): - continue - await self.set_database_metadata(dbname, key, _to_string(value)) - - # step 3: table-level metadata - for tablename, table in db.get("tables", {}).items(): - for key, value in table.items(): - if key == "columns": - continue - await self.set_resource_metadata( - dbname, tablename, key, _to_string(value) - ) - - # step 4: column-level metadata (only descriptions in metadata.json) - for columnname, column_description in table.get("columns", {}).items(): - await self.set_column_metadata( - dbname, tablename, columnname, "description", column_description - ) - - # TODO(alex) is metadata.json was loaded in, and --internal is not memory, then log - # a warning to user that they should delete their metadata.json file - - def 
get_jinja_environment(self, request: Request = None) -> Environment: - environment = self._jinja_env - if request: - for environment in pm.hook.jinja2_environment_from_request( - datasette=self, request=request, env=environment - ): - pass - return environment - - def get_action(self, name_or_abbr: str): - """ - Returns an Action object for the given name or abbreviation. Returns None if not found. - """ - if name_or_abbr in self.actions: - return self.actions[name_or_abbr] - # Try abbreviation - for action in self.actions.values(): - if action.abbr == name_or_abbr: - return action - return None - async def refresh_schemas(self): if self._refresh_schemas_lock.locked(): return @@ -595,44 +397,28 @@ class Datasette: await self._refresh_schemas() async def _refresh_schemas(self): - internal_db = self.get_internal_database() + internal_db = self.databases["_internal"] if not self.internal_db_created: await init_internal_db(internal_db) - await self.apply_metadata_json() self.internal_db_created = True + current_schema_versions = { row["database_name"]: row["schema_version"] for row in await internal_db.execute( - "select database_name, schema_version from catalog_databases" + "select database_name, schema_version from databases" ) } - # Delete stale entries for databases that are no longer attached - stale_databases = set(current_schema_versions.keys()) - set( - self.databases.keys() - ) - for stale_db_name in stale_databases: - await internal_db.execute_write( - "DELETE FROM catalog_databases WHERE database_name = ?", - [stale_db_name], - ) for database_name, db in self.databases.items(): schema_version = (await db.execute("PRAGMA schema_version")).first()[0] # Compare schema versions to see if we should skip it if schema_version == current_schema_versions.get(database_name): continue - placeholders = "(?, ?, ?, ?)" - values = [database_name, str(db.path), db.is_memory, schema_version] - if db.path is None: - placeholders = "(?, null, ?, ?)" - values = [database_name, db.is_memory, schema_version] await internal_db.execute_write( """ - INSERT OR REPLACE INTO catalog_databases (database_name, path, is_memory, schema_version) - VALUES {} - """.format( - placeholders - ), - values, + INSERT OR REPLACE INTO databases (database_name, path, is_memory, schema_version) + VALUES (?, ?, ?, ?) + """, + [database_name, str(db.path), db.is_memory, schema_version], ) await populate_schema_tables(internal_db, db) @@ -640,57 +426,12 @@ class Datasette: def urls(self): return Urls(self) - @property - def pm(self): - """ - Return the global plugin manager instance. - - This provides access to the pluggy PluginManager that manages all - Datasette plugins and hooks. Use datasette.pm.hook.hook_name() to - call plugin hooks. 
- """ - return pm - async def invoke_startup(self): # This must be called for Datasette to be in a usable state if self._startup_invoked: return - # Register event classes - event_classes = [] - for hook in pm.hook.register_events(datasette=self): - extra_classes = await await_me_maybe(hook) - if extra_classes: - event_classes.extend(extra_classes) - self.event_classes = tuple(event_classes) - - # Register actions, but watch out for duplicate name/abbr - action_names = {} - action_abbrs = {} - for hook in pm.hook.register_actions(datasette=self): - if hook: - for action in hook: - if ( - action.name in action_names - and action != action_names[action.name] - ): - raise StartupError( - "Duplicate action name: {}".format(action.name) - ) - if ( - action.abbr - and action.abbr in action_abbrs - and action != action_abbrs[action.abbr] - ): - raise StartupError( - "Duplicate action abbr: {}".format(action.abbr) - ) - action_names[action.name] = action - if action.abbr: - action_abbrs[action.abbr] = action - self.actions[action.name] = action - for hook in pm.hook.prepare_jinja2_environment( - env=self._jinja_env, datasette=self + env=self.jinja_env, datasette=self ): await await_me_maybe(hook) for hook in pm.hook.startup(datasette=self): @@ -703,53 +444,6 @@ class Datasette: def unsign(self, signed, namespace="default"): return URLSafeSerializer(self._secret, namespace).loads(signed) - def in_client(self) -> bool: - """Check if the current code is executing within a datasette.client request. - - Returns: - bool: True if currently executing within a datasette.client request, False otherwise. - """ - return _in_datasette_client.get() - - def create_token( - self, - actor_id: str, - *, - expires_after: int | None = None, - restrict_all: Iterable[str] | None = None, - restrict_database: Dict[str, Iterable[str]] | None = None, - restrict_resource: Dict[str, Dict[str, Iterable[str]]] | None = None, - ): - token = {"a": actor_id, "t": int(time.time())} - if expires_after: - token["d"] = expires_after - - def abbreviate_action(action): - # rename to abbr if possible - action_obj = self.actions.get(action) - if not action_obj: - return action - return action_obj.abbr or action - - if expires_after: - token["d"] = expires_after - if restrict_all or restrict_database or restrict_resource: - token["_r"] = {} - if restrict_all: - token["_r"]["a"] = [abbreviate_action(a) for a in restrict_all] - if restrict_database: - token["_r"]["d"] = {} - for database, actions in restrict_database.items(): - token["_r"]["d"][database] = [abbreviate_action(a) for a in actions] - if restrict_resource: - token["_r"]["r"] = {} - for database, resources in restrict_resource.items(): - for resource, actions in resources.items(): - token["_r"]["r"].setdefault(database, {})[resource] = [ - abbreviate_action(a) for a in actions - ] - return "dstok_{}".format(self.sign(token, namespace="token")) - def get_database(self, name=None, route=None): if route is not None: matches = [db for db in self.databases.values() if db.route == route] @@ -757,7 +451,8 @@ class Datasette: raise KeyError return matches[0] if name is None: - name = [key for key in self.databases.keys()][0] + # Return first database that isn't "_internal" + name = [key for key in self.databases.keys() if key != "_internal"][0] return self.databases[name] def add_database(self, db, name=None, route=None): @@ -779,13 +474,10 @@ class Datasette: self.databases = new_databases return db - def add_memory_database(self, memory_name, name=None, route=None): - return 
self.add_database( - Database(self, memory_name=memory_name), name=name, route=route - ) + def add_memory_database(self, memory_name): + return self.add_database(Database(self, memory_name=memory_name)) def remove_database(self, name): - self.get_database(name).close() new_databases = self.databases.copy() new_databases.pop(name) self.databases = new_databases @@ -808,156 +500,69 @@ class Datasette: orig[key] = upd_value return orig - async def get_instance_metadata(self): - rows = await self.get_internal_database().execute( - """ - SELECT - key, - value - FROM metadata_instance - """ - ) - return dict(rows) + def metadata(self, key=None, database=None, table=None, fallback=True): + """ + Looks up metadata, cascading backwards from specified level. + Returns None if metadata value is not found. + """ + assert not ( + database is None and table is not None + ), "Cannot call metadata() with table= specified but not database=" + metadata = {} - async def get_database_metadata(self, database_name: str): - rows = await self.get_internal_database().execute( - """ - SELECT - key, - value - FROM metadata_databases - WHERE database_name = ? - """, - [database_name], - ) - return dict(rows) + for hook_dbs in pm.hook.get_metadata( + datasette=self, key=key, database=database, table=table + ): + metadata = self._metadata_recursive_update(metadata, hook_dbs) - async def get_resource_metadata(self, database_name: str, resource_name: str): - rows = await self.get_internal_database().execute( - """ - SELECT - key, - value - FROM metadata_resources - WHERE database_name = ? - AND resource_name = ? - """, - [database_name, resource_name], - ) - return dict(rows) + # security precaution!! don't allow anything in the local config + # to be overwritten. this is a temporary measure, not sure if this + # is a good idea long term or maybe if it should just be a concern + # of the plugin's implemtnation + metadata = self._metadata_recursive_update(metadata, self._metadata_local) - async def get_column_metadata( - self, database_name: str, resource_name: str, column_name: str - ): - rows = await self.get_internal_database().execute( - """ - SELECT - key, - value - FROM metadata_columns - WHERE database_name = ? - AND resource_name = ? - AND column_name = ? - """, - [database_name, resource_name, column_name], - ) - return dict(rows) + databases = metadata.get("databases") or {} - async def set_instance_metadata(self, key: str, value: str): - # TODO upsert only supported on SQLite 3.24.0 (2018-06-04) - await self.get_internal_database().execute_write( - """ - INSERT INTO metadata_instance(key, value) - VALUES(?, ?) - ON CONFLICT(key) DO UPDATE SET value = excluded.value; - """, - [key, value], - ) + search_list = [] + if database is not None: + search_list.append(databases.get(database) or {}) + if table is not None: + table_metadata = ((databases.get(database) or {}).get("tables") or {}).get( + table + ) or {} + search_list.insert(0, table_metadata) - async def set_database_metadata(self, database_name: str, key: str, value: str): - # TODO upsert only supported on SQLite 3.24.0 (2018-06-04) - await self.get_internal_database().execute_write( - """ - INSERT INTO metadata_databases(database_name, key, value) - VALUES(?, ?, ?) 
- ON CONFLICT(database_name, key) DO UPDATE SET value = excluded.value; - """, - [database_name, key, value], - ) + search_list.append(metadata) + if not fallback: + # No fallback allowed, so just use the first one in the list + search_list = search_list[:1] + if key is not None: + for item in search_list: + if key in item: + return item[key] + return None + else: + # Return the merged list + m = {} + for item in search_list: + m.update(item) + return m - async def set_resource_metadata( - self, database_name: str, resource_name: str, key: str, value: str - ): - # TODO upsert only supported on SQLite 3.24.0 (2018-06-04) - await self.get_internal_database().execute_write( - """ - INSERT INTO metadata_resources(database_name, resource_name, key, value) - VALUES(?, ?, ?, ?) - ON CONFLICT(database_name, resource_name, key) DO UPDATE SET value = excluded.value; - """, - [database_name, resource_name, key, value], - ) - - async def set_column_metadata( - self, - database_name: str, - resource_name: str, - column_name: str, - key: str, - value: str, - ): - # TODO upsert only supported on SQLite 3.24.0 (2018-06-04) - await self.get_internal_database().execute_write( - """ - INSERT INTO metadata_columns(database_name, resource_name, column_name, key, value) - VALUES(?, ?, ?, ?, ?) - ON CONFLICT(database_name, resource_name, column_name, key) DO UPDATE SET value = excluded.value; - """, - [database_name, resource_name, column_name, key, value], - ) - - def get_internal_database(self): - return self._internal_database + @property + def _metadata(self): + return self.metadata() def plugin_config(self, plugin_name, database=None, table=None, fallback=True): """Return config for plugin, falling back from specified database/table""" - if database is None and table is None: - config = self._plugin_config_top(plugin_name) - else: - config = self._plugin_config_nested(plugin_name, database, table, fallback) - - return resolve_env_secrets(config, os.environ) - - def _plugin_config_top(self, plugin_name): - """Returns any top-level plugin configuration for the specified plugin.""" - return ((self.config or {}).get("plugins") or {}).get(plugin_name) - - def _plugin_config_nested(self, plugin_name, database, table=None, fallback=True): - """Returns any database or table-level plugin configuration for the specified plugin.""" - db_config = ((self.config or {}).get("databases") or {}).get(database) - - # if there's no db-level configuration, then return early, falling back to top-level if needed - if not db_config: - return self._plugin_config_top(plugin_name) if fallback else None - - db_plugin_config = (db_config.get("plugins") or {}).get(plugin_name) - - if table: - table_plugin_config = ( - ((db_config.get("tables") or {}).get(table) or {}).get("plugins") or {} - ).get(plugin_name) - - # fallback to db_config or top-level config, in that order, if needed - if table_plugin_config is None and fallback: - return db_plugin_config or self._plugin_config_top(plugin_name) - - return table_plugin_config - - # fallback to top-level if needed - if db_plugin_config is None and fallback: - self._plugin_config_top(plugin_name) - - return db_plugin_config + plugins = self.metadata( + "plugins", database=database, table=table, fallback=fallback + ) + if plugins is None: + return None + plugin_config = plugins.get(plugin_name) + # Resolve any $file and $env keys + plugin_config = resolve_env_secrets(plugin_config, os.environ) + return plugin_config def app_css_hash(self): if not hasattr(self, "_app_css_hash"): @@ -968,7 
+573,7 @@ class Datasette: return self._app_css_hash async def get_canned_queries(self, database_name, actor): - queries = {} + queries = self.metadata("queries", database=database_name, fallback=False) or {} for more_queries in pm.hook.canned_queries( datasette=self, database=database_name, @@ -990,10 +595,24 @@ class Datasette: if query: return query + def update_with_inherited_metadata(self, metadata): + # Fills in source/license with defaults, if available + metadata.update( + { + "source": metadata.get("source") or self.metadata("source"), + "source_url": metadata.get("source_url") or self.metadata("source_url"), + "license": metadata.get("license") or self.metadata("license"), + "license_url": metadata.get("license_url") + or self.metadata("license_url"), + "about": metadata.get("about") or self.metadata("about"), + "about_url": metadata.get("about_url") or self.metadata("about_url"), + } + ) + def _prepare_connection(self, conn, database): conn.row_factory = sqlite3.Row conn.text_factory = lambda x: str(x, "utf-8", "replace") - if self.sqlite_extensions and database != INTERNAL_DB_NAME: + if self.sqlite_extensions: conn.enable_load_extension(True) for extension in self.sqlite_extensions: # "extension" is either a string path to the extension @@ -1006,8 +625,7 @@ class Datasette: if self.setting("cache_size_kb"): conn.execute(f"PRAGMA cache_size=-{self.setting('cache_size_kb')}") # pylint: disable=no-member - if database != INTERNAL_DB_NAME: - pm.hook.prepare_connection(conn=conn, database=database, datasette=self) + pm.hook.prepare_connection(conn=conn, database=database, datasette=self) # If self.crossdb and this is _memory, connect the first SQLITE_LIMIT_ATTACHED databases if self.crossdb and database == "_memory": count = 0 @@ -1050,14 +668,17 @@ class Datasette: if request: actor = request.actor # Top-level link - if await self.allowed(action="view-instance", actor=actor): + if await self.permission_allowed( + actor=actor, action="view-instance", default=True + ): crumbs.append({"href": self.urls.instance(), "label": "home"}) # Database link if database: - if await self.allowed( - action="view-database", - resource=DatabaseResource(database=database), + if await self.permission_allowed( actor=actor, + action="view-database", + resource=database, + default=True, ): crumbs.append( { @@ -1068,10 +689,11 @@ class Datasette: # Table link if table: assert database, "table= requires database=" - if await self.allowed( - action="view-table", - resource=TableResource(database=database, table=table), + if await self.permission_allowed( actor=actor, + action="view-table", + resource=(database, table), + default=True, ): crumbs.append( { @@ -1081,372 +703,98 @@ class Datasette: ) return crumbs - async def actors_from_ids( - self, actor_ids: Iterable[str | int] - ) -> Dict[int | str, Dict]: - result = pm.hook.actors_from_ids(datasette=self, actor_ids=actor_ids) + async def permission_allowed(self, actor, action, resource=None, default=False): + """Check permissions using the permissions_allowed plugin hook""" + result = None + for check in pm.hook.permission_allowed( + datasette=self, + actor=actor, + action=action, + resource=resource, + ): + check = await await_me_maybe(check) + if check is not None: + result = check + used_default = False if result is None: - # Do the default thing - return {actor_id: {"id": actor_id} for actor_id in actor_ids} - result = await await_me_maybe(result) + result = default + used_default = True + self._permission_checks.append( + { + "when": 
datetime.datetime.utcnow().isoformat(), + "actor": actor, + "action": action, + "resource": resource, + "used_default": used_default, + "result": result, + } + ) return result - async def track_event(self, event: Event): - assert isinstance(event, self.event_classes), "Invalid event type: {}".format( - type(event) - ) - for hook in pm.hook.track_event(datasette=self, event=event): - await await_me_maybe(hook) - - def resource_for_action(self, action: str, parent: str | None, child: str | None): + async def ensure_permissions( + self, + actor: dict, + permissions: Sequence[Union[Tuple[str, Union[str, Tuple[str, str]]], str]], + ): """ - Create a Resource instance for the given action with parent/child values. + permissions is a list of (action, resource) tuples or 'action' strings - Looks up the action's resource_class and instantiates it with the - provided parent and child identifiers. - - Args: - action: The action name (e.g., "view-table", "view-query") - parent: The parent resource identifier (e.g., database name) - child: The child resource identifier (e.g., table/query name) - - Returns: - A Resource instance of the appropriate subclass - - Raises: - ValueError: If the action is unknown + Raises datasette.Forbidden() if any of the checks fail """ - from datasette.permissions import Resource - - action_obj = self.actions.get(action) - if not action_obj: - raise ValueError(f"Unknown action: {action}") - - resource_class = action_obj.resource_class - instance = object.__new__(resource_class) - Resource.__init__(instance, parent=parent, child=child) - return instance + assert actor is None or isinstance(actor, dict), "actor must be None or a dict" + for permission in permissions: + if isinstance(permission, str): + action = permission + resource = None + elif isinstance(permission, (tuple, list)) and len(permission) == 2: + action, resource = permission + else: + assert ( + False + ), "permission should be string or tuple of two items: {}".format( + repr(permission) + ) + ok = await self.permission_allowed( + actor, + action, + resource=resource, + default=None, + ) + if ok is not None: + if ok: + return + else: + raise Forbidden(action) async def check_visibility( self, actor: dict, - action: str, - resource: "Resource" | None = None, + action: Optional[str] = None, + resource: Optional[Union[str, Tuple[str, str]]] = None, + permissions: Optional[ + Sequence[Union[Tuple[str, Union[str, Tuple[str, str]]], str]] + ] = None, ): - """ - Check if actor can see a resource and if it's private. - - Returns (visible, private) tuple: - - visible: bool - can the actor see it? - - private: bool - if visible, can anonymous users NOT see it? 
- """ - from datasette.permissions import Resource - - # Validate that resource is a Resource object or None - if resource is not None and not isinstance(resource, Resource): - raise TypeError(f"resource must be a Resource subclass instance or None.") - - # Check if actor can see it - if not await self.allowed(action=action, resource=resource, actor=actor): + """Returns (visible, private) - visible = can you see it, private = can others see it too""" + if permissions: + assert ( + not action and not resource + ), "Can't use action= or resource= with permissions=" + else: + permissions = [(action, resource)] + try: + await self.ensure_permissions(actor, permissions) + except Forbidden: return False, False - - # Check if anonymous user can see it (for "private" flag) - if not await self.allowed(action=action, resource=resource, actor=None): - # Actor can see it but anonymous cannot - it's private + # User can see it, but can the anonymous user see it? + try: + await self.ensure_permissions(None, permissions) + except Forbidden: + # It's visible but private return True, True - - # Both actor and anonymous can see it - it's public + # It's visible to everyone return True, False - async def allowed_resources_sql( - self, - *, - action: str, - actor: dict | None = None, - parent: str | None = None, - include_is_private: bool = False, - ) -> ResourcesSQL: - """ - Build SQL query to get all resources the actor can access for the given action. - - Args: - action: The action name (e.g., "view-table") - actor: The actor dict (or None for unauthenticated) - parent: Optional parent filter (e.g., database name) to limit results - include_is_private: If True, include is_private column showing if anonymous cannot access - - Returns a namedtuple of (query: str, params: dict) that can be executed against the internal database. - The query returns rows with (parent, child, reason) columns, plus is_private if requested. - - Example: - query, params = await datasette.allowed_resources_sql( - action="view-table", - actor=actor, - parent="mydb", - include_is_private=True - ) - result = await datasette.get_internal_database().execute(query, params) - """ - from datasette.utils.actions_sql import build_allowed_resources_sql - - action_obj = self.actions.get(action) - if not action_obj: - raise ValueError(f"Unknown action: {action}") - - sql, params = await build_allowed_resources_sql( - self, actor, action, parent=parent, include_is_private=include_is_private - ) - return ResourcesSQL(sql, params) - - async def allowed_resources( - self, - action: str, - actor: dict | None = None, - *, - parent: str | None = None, - include_is_private: bool = False, - include_reasons: bool = False, - limit: int = 100, - next: str | None = None, - ) -> PaginatedResources: - """ - Return paginated resources the actor can access for the given action. - - Uses SQL with keyset pagination to efficiently filter resources. - Returns PaginatedResources with list of Resource instances and pagination metadata. 
- - Args: - action: The action name (e.g., "view-table") - actor: The actor dict (or None for unauthenticated) - parent: Optional parent filter (e.g., database name) to limit results - include_is_private: If True, adds a .private attribute to each Resource - include_reasons: If True, adds a .reasons attribute with List[str] of permission reasons - limit: Maximum number of results to return (1-1000, default 100) - next: Keyset token from previous page for pagination - - Returns: - PaginatedResources with: - - resources: List of Resource objects for this page - - next: Token for next page (None if no more results) - - Example: - # Get first page of tables - page = await datasette.allowed_resources("view-table", actor, limit=50) - for table in page.resources: - print(f"{table.parent}/{table.child}") - - # Get next page - if page.next: - next_page = await datasette.allowed_resources( - "view-table", actor, limit=50, next=page.next - ) - - # With reasons for debugging - page = await datasette.allowed_resources( - "view-table", actor, include_reasons=True - ) - for table in page.resources: - print(f"{table.child}: {table.reasons}") - - # Iterate through all results with async generator - page = await datasette.allowed_resources("view-table", actor) - async for table in page.all(): - print(table.child) - """ - - action_obj = self.actions.get(action) - if not action_obj: - raise ValueError(f"Unknown action: {action}") - - # Validate and cap limit - limit = min(max(1, limit), 1000) - - # Get base SQL query - query, params = await self.allowed_resources_sql( - action=action, - actor=actor, - parent=parent, - include_is_private=include_is_private, - ) - - # Add keyset pagination WHERE clause if next token provided - if next: - try: - components = urlsafe_components(next) - if len(components) >= 2: - last_parent, last_child = components[0], components[1] - # Keyset condition: (parent > last) OR (parent = last AND child > last) - keyset_where = """ - (parent > :keyset_parent OR - (parent = :keyset_parent AND child > :keyset_child)) - """ - # Wrap original query and add keyset filter - query = f"SELECT * FROM ({query}) WHERE {keyset_where}" - params["keyset_parent"] = last_parent - params["keyset_child"] = last_child - except (ValueError, KeyError): - # Invalid token - ignore and start from beginning - pass - - # Add LIMIT (fetch limit+1 to detect if there are more results) - # Note: query from allowed_resources_sql() already includes ORDER BY parent, child - query = f"{query} LIMIT :limit" - params["limit"] = limit + 1 - - # Execute query - result = await self.get_internal_database().execute(query, params) - rows = list(result.rows) - - # Check if truncated (got more than limit rows) - truncated = len(rows) > limit - if truncated: - rows = rows[:limit] # Remove the extra row - - # Build Resource objects with optional attributes - resources = [] - for row in rows: - # row[0]=parent, row[1]=child, row[2]=reason, row[3]=is_private (if requested) - resource = self.resource_for_action(action, parent=row[0], child=row[1]) - - # Add reasons if requested - if include_reasons: - reason_json = row[2] - try: - reasons_array = ( - json.loads(reason_json) if isinstance(reason_json, str) else [] - ) - resource.reasons = [r for r in reasons_array if r is not None] - except (json.JSONDecodeError, TypeError): - resource.reasons = [reason_json] if reason_json else [] - - # Add private flag if requested - if include_is_private: - resource.private = bool(row[3]) - - resources.append(resource) - - # Generate next token if 
there are more results - next_token = None - if truncated and resources: - last_resource = resources[-1] - # Use tilde-encoding like table pagination - next_token = "{},{}".format( - tilde_encode(str(last_resource.parent)), - tilde_encode(str(last_resource.child)), - ) - - return PaginatedResources( - resources=resources, - next=next_token, - _datasette=self, - _action=action, - _actor=actor, - _parent=parent, - _include_is_private=include_is_private, - _include_reasons=include_reasons, - _limit=limit, - ) - - async def allowed( - self, - *, - action: str, - resource: "Resource" = None, - actor: dict | None = None, - ) -> bool: - """ - Check if actor can perform action on specific resource. - - Uses SQL to check permission for a single resource without fetching all resources. - This is efficient - it does NOT call allowed_resources() and check membership. - - For global actions, resource should be None (or omitted). - - Example: - from datasette.resources import TableResource - can_view = await datasette.allowed( - action="view-table", - resource=TableResource(database="analytics", table="users"), - actor=actor - ) - - # For global actions, resource can be omitted: - can_debug = await datasette.allowed(action="permissions-debug", actor=actor) - """ - from datasette.utils.actions_sql import check_permission_for_resource - - # For global actions, resource remains None - - # Check if this action has also_requires - if so, check that action first - action_obj = self.actions.get(action) - if action_obj and action_obj.also_requires: - # Must have the required action first - if not await self.allowed( - action=action_obj.also_requires, - resource=resource, - actor=actor, - ): - return False - - # For global actions, resource is None - parent = resource.parent if resource else None - child = resource.child if resource else None - - result = await check_permission_for_resource( - datasette=self, - actor=actor, - action=action, - parent=parent, - child=child, - ) - - # Log the permission check for debugging - self._permission_checks.append( - PermissionCheck( - when=datetime.datetime.now(datetime.timezone.utc).isoformat(), - actor=actor, - action=action, - parent=parent, - child=child, - result=result, - ) - ) - - return result - - async def ensure_permission( - self, - *, - action: str, - resource: "Resource" = None, - actor: dict | None = None, - ): - """ - Check if actor can perform action on resource, raising Forbidden if not. - - This is a convenience wrapper around allowed() that raises Forbidden - instead of returning False. Use this when you want to enforce a permission - check and halt execution if it fails. 
- - Example: - from datasette.resources import TableResource - - # Will raise Forbidden if actor cannot view the table - await datasette.ensure_permission( - action="view-table", - resource=TableResource(database="analytics", table="users"), - actor=request.actor - ) - - # For instance-level actions, resource can be omitted: - await datasette.ensure_permission( - action="permissions-debug", - actor=request.actor - ) - """ - if not await self.allowed(action=action, resource=resource, actor=actor): - raise Forbidden(action) - async def execute( self, db_name, @@ -1466,7 +814,7 @@ class Datasette: log_sql_errors=log_sql_errors, ) - async def expand_foreign_keys(self, actor, database, table, column, values): + async def expand_foreign_keys(self, database, table, column, values): """Returns dict mapping (column, value) -> label""" labeled_fks = {} db = self.databases[database] @@ -1480,19 +828,7 @@ class Datasette: ][0] except IndexError: return {} - # Ensure user has permission to view the referenced table - from datasette.resources import TableResource - - other_table = fk["other_table"] - other_column = fk["other_column"] - visible, _ = await self.check_visibility( - actor, - action="view-table", - resource=TableResource(database=database, table=other_table), - ) - if not visible: - return {} - label_column = await db.label_column_for_table(other_table) + label_column = await db.label_column_for_table(fk["other_table"]) if not label_column: return {(fk["column"], value): str(value) for value in values} labeled_fks = {} @@ -1501,9 +837,9 @@ class Datasette: from {other_table} where {other_column} in ({placeholders}) """.format( - other_column=escape_sqlite(other_column), + other_column=escape_sqlite(fk["other_column"]), label_column=escape_sqlite(label_column), - other_table=escape_sqlite(other_table), + other_table=escape_sqlite(fk["other_table"]), placeholders=", ".join(["?"] * len(set(values))), ) try: @@ -1521,6 +857,11 @@ class Datasette: url = "https://" + url[len("http://") :] return url + def _register_custom_units(self): + """Register any custom units defined in the metadata.json with Pint""" + for unit in self.metadata("custom_units") or []: + ureg.define(unit) + def _connected_databases(self): return [ { @@ -1533,14 +874,16 @@ class Datasette: "hash": d.hash, } for name, d in self.databases.items() + if name != "_internal" ] def _versions(self): conn = sqlite3.connect(":memory:") self._prepare_connection(conn, "_memory") sqlite_version = conn.execute("select sqlite_version()").fetchone()[0] - sqlite_extensions = {"json1": detect_json1(conn)} + sqlite_extensions = {} for extension, testsql, hasversion in ( + ("json1", "SELECT json('{}')", False), ("spatialite", "SELECT spatialite_version()", True), ): try: @@ -1604,9 +947,9 @@ class Datasette: if using_pysqlite3: for package in ("pysqlite3", "pysqlite3-binary"): try: - info["pysqlite3"] = importlib.metadata.version(package) + info["pysqlite3"] = pkg_resources.get_distribution(package).version break - except importlib.metadata.PackageNotFoundError: + except pkg_resources.DistributionNotFound: pass return info @@ -1641,39 +984,24 @@ class Datasette: {"name": t.name, "ident": t.ident, "daemon": t.daemon} for t in threads ], } - tasks = asyncio.all_tasks() - d.update( - { - "num_tasks": len(tasks), - "tasks": [_cleaner_task_str(t) for t in tasks], - } - ) + # Only available in Python 3.7+ + if hasattr(asyncio, "all_tasks"): + tasks = asyncio.all_tasks() + d.update( + { + "num_tasks": len(tasks), + "tasks": [_cleaner_task_str(t) for t in 
tasks], + } + ) return d def _actor(self, request): return {"actor": request.actor} - def _actions(self): - return [ - { - "name": action.name, - "abbr": action.abbr, - "description": action.description, - "takes_parent": action.takes_parent, - "takes_child": action.takes_child, - "resource_class": ( - action.resource_class.__name__ if action.resource_class else None - ), - "also_requires": action.also_requires, - } - for action in sorted(self.actions.values(), key=lambda a: a.name) - ] - - async def table_config(self, database: str, table: str) -> dict: - """Return dictionary of configuration for specified table""" + def table_metadata(self, database, table): + """Fetch table-specific metadata.""" return ( - (self.config or {}) - .get("databases", {}) + (self.metadata("databases") or {}) .get(database, {}) .get("tables", {}) .get(table, {}) @@ -1701,11 +1029,7 @@ class Datasette: ) async def render_template( - self, - templates: List[str] | str | Template, - context: Dict[str, Any] | Context | None = None, - request: Request | None = None, - view_name: str | None = None, + self, templates, context=None, request=None, view_name=None ): if not self._startup_invoked: raise Exception("render_template() called before await ds.invoke_startup()") @@ -1715,9 +1039,7 @@ class Datasette: else: if isinstance(templates, str): templates = [templates] - template = self.get_jinja_environment(request).select_template(templates) - if dataclasses.is_dataclass(context): - context = dataclasses.asdict(context) + template = self.jinja_env.select_template(templates) body_scripts = [] # pylint: disable=no-member for extra_script in pm.hook.extra_body_script( @@ -1803,18 +1125,6 @@ class Datasette: return await template.render_async(template_context) - def set_actor_cookie( - self, response: Response, actor: dict, expire_after: int | None = None - ): - data = {"a": actor} - if expire_after: - expires_at = int(time.time()) + (24 * 60 * 60) - data["e"] = baseconv.base62.encode(expires_at) - response.set_cookie("ds_actor", self.sign(data, "actor")) - - def delete_actor_cookie(self, response: Response): - response.set_cookie("ds_actor", "", expires=0, max_age=0) - async def _asset_urls(self, key, template, context, request, view_name): # Flatten list-of-lists from plugins: seen_urls = set() @@ -1830,7 +1140,7 @@ class Datasette: ): hook = await await_me_maybe(hook) collected.extend(hook) - collected.extend((self.config or {}).get(key) or []) + collected.extend(self.metadata(key) or []) output = [] for url_or_dict in collected: if isinstance(url_or_dict, dict): @@ -1855,11 +1165,6 @@ class Datasette: output.append(script) return output - def _config(self): - return redact_keys( - self.config, ("secret", "key", "password", "token", "hash", "dsn") - ) - def _routes(self): routes = [] @@ -1871,8 +1176,6 @@ class Datasette: routes.append((regex, view)) add_route(IndexView.as_view(self), r"/(\.(?Pjsono?))?$") - add_route(IndexView.as_view(self), r"/-/(\.(?Pjsono?))?$") - add_route(permanent_redirect("/-/"), r"/-$") # TODO: /favicon.ico and /-/static/ deserve far-future cache expires add_route(favicon, "/favicon.ico") @@ -1902,6 +1205,10 @@ class Datasette: ), r"/:memory:(?P.*)$", ) + add_route( + JsonDataView.as_view(self, "metadata.json", lambda: self.metadata()), + r"/-/metadata(\.(?Pjson))?$", + ) add_route( JsonDataView.as_view(self, "versions.json", self._versions), r"/-/versions(\.(?Pjson))?$", @@ -1917,8 +1224,12 @@ class Datasette: r"/-/settings(\.(?Pjson))?$", ) add_route( - JsonDataView.as_view(self, 
"config.json", lambda: self._config()), - r"/-/config(\.(?Pjson))?$", + permanent_redirect("/-/settings.json"), + r"/-/config.json", + ) + add_route( + permanent_redirect("/-/settings"), + r"/-/config", ) add_route( JsonDataView.as_view(self, "threads.json", self._threads), @@ -1929,21 +1240,9 @@ class Datasette: r"/-/databases(\.(?Pjson))?$", ) add_route( - JsonDataView.as_view( - self, "actor.json", self._actor, needs_request=True, permission=None - ), + JsonDataView.as_view(self, "actor.json", self._actor, needs_request=True), r"/-/actor(\.(?Pjson))?$", ) - add_route( - JsonDataView.as_view( - self, - "actions.json", - self._actions, - template="debug_actions.html", - permission="permissions-debug", - ), - r"/-/actions(\.(?Pjson))?$", - ) add_route( AuthTokenView.as_view(self), r"/-/auth-token$", @@ -1956,14 +1255,6 @@ class Datasette: ApiExplorerView.as_view(self), r"/-/api$", ) - add_route( - TablesView.as_view(self), - r"/-/tables(\.(?Pjson))?$", - ) - add_route( - InstanceSchemaView.as_view(self), - r"/-/schema(\.(?Pjson|md))?$", - ) add_route( LogoutView.as_view(self), r"/-/logout$", @@ -1972,18 +1263,6 @@ class Datasette: PermissionsDebugView.as_view(self), r"/-/permissions$", ) - add_route( - AllowedResourcesView.as_view(self), - r"/-/allowed(\.(?Pjson))?$", - ) - add_route( - PermissionRulesView.as_view(self), - r"/-/rules(\.(?Pjson))?$", - ) - add_route( - PermissionCheckView.as_view(self), - r"/-/check(\.(?Pjson))?$", - ) add_route( MessagesDebugView.as_view(self), r"/-/messages$", @@ -1993,28 +1272,16 @@ class Datasette: r"/-/allow-debug$", ) add_route( - wrap_view(PatternPortfolioView, self), + PatternPortfolioView.as_view(self), r"/-/patterns$", ) + add_route(DatabaseDownload.as_view(self), r"/(?P[^\/\.]+)\.db$") add_route( - wrap_view(database_download, self), - r"/(?P[^\/\.]+)\.db$", - ) - add_route( - wrap_view(DatabaseView, self), - r"/(?P[^\/\.]+)(\.(?P\w+))?$", + DatabaseView.as_view(self), r"/(?P[^\/\.]+)(\.(?P\w+))?$" ) add_route(TableCreateView.as_view(self), r"/(?P[^\/\.]+)/-/create$") add_route( - DatabaseSchemaView.as_view(self), - r"/(?P[^\/\.]+)/-/schema(\.(?Pjson|md))?$", - ) - add_route( - wrap_view(QueryView, self), - r"/(?P[^\/\.]+)/-/query(\.(?P\w+))?$", - ) - add_route( - wrap_view(table_view, self), + TableView.as_view(self), r"/(?P[^\/\.]+)/(?P[^\/\.]+)(\.(?P\w+))?$", ) add_route( @@ -2033,10 +1300,6 @@ class Datasette: TableDropView.as_view(self), r"/(?P[^\/\.]+)/(?P
[^\/\.]+)/-/drop$", ) - add_route( - TableSchemaView.as_view(self), - r"/(?P<database>[^\/\.]+)/(?P<table>
[^\/\.]+)/-/schema(\.(?P<format>json|md))?$", ) add_route( RowDeleteView.as_view(self), r"/(?P<database>[^\/\.]+)/(?P<table>
[^/]+?)/(?P[^/]+?)/-/delete$", @@ -2056,7 +1319,9 @@ class Datasette: try: return self.get_database(route=database_route) except KeyError: - raise DatabaseNotFound(database_route) + raise DatabaseNotFound( + "Database not found: {}".format(database_route), database_route + ) async def resolve_table(self, request): db = await self.resolve_database(request) @@ -2067,7 +1332,9 @@ class Datasette: if not table_exists: is_view = await db.view_exists(table_name) if not (table_exists or is_view): - raise TableNotFound(db.name, table_name) + raise TableNotFound( + "Table not found: {}".format(table_name), db.name, table_name + ) return ResolvedTable(db, table_name, is_view) async def resolve_row(self, request): @@ -2077,30 +1344,22 @@ class Datasette: results = await db.execute(sql, params, truncate=True) row = results.first() if row is None: - raise RowNotFound(db.name, table_name, pk_values) + raise RowNotFound( + "Row not found: {}".format(pk_values), db.name, table_name, pk_values + ) return ResolvedRow(db, table_name, sql, params, pks, pk_values, results.first()) def app(self): """Returns an ASGI app function that serves the whole of Datasette""" routes = self._routes() + self._register_custom_units() async def setup_db(): # First time server starts up, calculate table counts for immutable databases - for database in self.databases.values(): + for dbname, database in self.databases.items(): if not database.is_mutable: await database.table_counts(limit=60 * 60 * 1000) - async def custom_csrf_error(scope, send, message_id): - await asgi_send( - send, - content=await self.render_template( - "csrf_error.html", - {"message_id": message_id, "message_name": Errors(message_id).name}, - ), - status=403, - content_type="text/html; charset=utf-8", - ) - asgi = asgi_csrf.asgi_csrf( DatasetteRouter(self, routes), signing_secret=self._secret, @@ -2108,12 +1367,13 @@ class Datasette: skip_if_scope=lambda scope: any( pm.hook.skip_csrf(datasette=self, scope=scope) ), - send_csrf_failed=custom_csrf_error, ) if self.setting("trace_debug"): asgi = AsgiTracer(asgi) - asgi = AsgiLifespan(asgi) - asgi = AsgiRunOnFirstRequest(asgi, on_startup=[setup_db, self.invoke_startup]) + asgi = AsgiLifespan( + asgi, + on_startup=setup_db, + ) for wrapper in pm.hook.asgi_wrapper(datasette=self): asgi = wrapper(asgi) return asgi @@ -2123,6 +1383,16 @@ class DatasetteRouter: def __init__(self, datasette, routes): self.ds = datasette self.routes = routes or [] + # Build a list of pages/blah/{name}.html matching expressions + pattern_templates = [ + filepath + for filepath in self.ds.jinja_env.list_templates() + if "{" in filepath and filepath.startswith("pages/") + ] + self.page_routes = [ + (route_pattern_from_filepath(filepath[len("pages/") :]), filepath) + for filepath in pattern_templates + ] async def __call__(self, scope, receive, send): # Because we care about "foo/bar" v.s. 
"foo%2Fbar" we decode raw_path ourselves @@ -2222,24 +1492,13 @@ class DatasetteRouter: route_path = request.scope.get("route_path", request.scope["path"]) # Jinja requires template names to use "/" even on Windows template_name = "pages" + route_path + ".html" - # Build a list of pages/blah/{name}.html matching expressions - environment = self.ds.get_jinja_environment(request) - pattern_templates = [ - filepath - for filepath in environment.list_templates() - if "{" in filepath and filepath.startswith("pages/") - ] - page_routes = [ - (route_pattern_from_filepath(filepath[len("pages/") :]), filepath) - for filepath in pattern_templates - ] try: - template = environment.select_template([template_name]) + template = self.ds.jinja_env.select_template([template_name]) except TemplateNotFound: template = None if template is None: # Try for a pages/blah/{name}.html template match - for regex, wildcard_template in page_routes: + for regex, wildcard_template in self.page_routes: match = regex.match(route_path) if match is not None: context.update(match.groupdict()) @@ -2327,43 +1586,7 @@ def _cleaner_task_str(task): return _cleaner_task_str_re.sub("", s) -def wrap_view(view_fn_or_class, datasette): - is_function = isinstance(view_fn_or_class, types.FunctionType) - if is_function: - return wrap_view_function(view_fn_or_class, datasette) - else: - if not isinstance(view_fn_or_class, type): - raise ValueError("view_fn_or_class must be a function or a class") - return wrap_view_class(view_fn_or_class, datasette) - - -def wrap_view_class(view_class, datasette): - async def async_view_for_class(request, send): - instance = view_class() - if inspect.iscoroutinefunction(instance.__call__): - return await async_call_with_supported_arguments( - instance.__call__, - scope=request.scope, - receive=request.receive, - send=send, - request=request, - datasette=datasette, - ) - else: - return call_with_supported_arguments( - instance.__call__, - scope=request.scope, - receive=request.receive, - send=send, - request=request, - datasette=datasette, - ) - - async_view_for_class.view_class = view_class - return async_view_for_class - - -def wrap_view_function(view_fn, datasette): +def wrap_view(view_fn, datasette): @functools.wraps(view_fn) async def async_view_fn(request, send): if inspect.iscoroutinefunction(view_fn): @@ -2427,22 +1650,9 @@ class NotFoundExplicit(NotFound): class DatasetteClient: - """Internal HTTP client for making requests to a Datasette instance. - - Used for testing and for internal operations that need to make HTTP requests - to the Datasette app without going through an actual HTTP server. 
- """ - def __init__(self, ds): self.ds = ds - - @property - def app(self): - return self.ds.app() - - def actor_cookie(self, actor): - # Utility method, mainly for tests - return self.ds.sign({"a": actor}, "actor") + self.app = ds.app() def _fix(self, path, avoid_path_rewrites=False): if not isinstance(path, PrefixedUrlString) and not avoid_path_rewrites: @@ -2451,89 +1661,45 @@ class DatasetteClient: path = f"http://localhost{path}" return path - async def _request(self, method, path, skip_permission_checks=False, **kwargs): - from datasette.permissions import SkipPermissions + async def get(self, path, **kwargs): + await self.ds.invoke_startup() + async with httpx.AsyncClient(app=self.app) as client: + return await client.get(self._fix(path), **kwargs) - with _DatasetteClientContext(): - if skip_permission_checks: - with SkipPermissions(): - async with httpx.AsyncClient( - transport=httpx.ASGITransport(app=self.app), - cookies=kwargs.pop("cookies", None), - ) as client: - return await getattr(client, method)(self._fix(path), **kwargs) - else: - async with httpx.AsyncClient( - transport=httpx.ASGITransport(app=self.app), - cookies=kwargs.pop("cookies", None), - ) as client: - return await getattr(client, method)(self._fix(path), **kwargs) + async def options(self, path, **kwargs): + await self.ds.invoke_startup() + async with httpx.AsyncClient(app=self.app) as client: + return await client.options(self._fix(path), **kwargs) - async def get(self, path, skip_permission_checks=False, **kwargs): - return await self._request( - "get", path, skip_permission_checks=skip_permission_checks, **kwargs - ) + async def head(self, path, **kwargs): + await self.ds.invoke_startup() + async with httpx.AsyncClient(app=self.app) as client: + return await client.head(self._fix(path), **kwargs) - async def options(self, path, skip_permission_checks=False, **kwargs): - return await self._request( - "options", path, skip_permission_checks=skip_permission_checks, **kwargs - ) + async def post(self, path, **kwargs): + await self.ds.invoke_startup() + async with httpx.AsyncClient(app=self.app) as client: + return await client.post(self._fix(path), **kwargs) - async def head(self, path, skip_permission_checks=False, **kwargs): - return await self._request( - "head", path, skip_permission_checks=skip_permission_checks, **kwargs - ) + async def put(self, path, **kwargs): + await self.ds.invoke_startup() + async with httpx.AsyncClient(app=self.app) as client: + return await client.put(self._fix(path), **kwargs) - async def post(self, path, skip_permission_checks=False, **kwargs): - return await self._request( - "post", path, skip_permission_checks=skip_permission_checks, **kwargs - ) + async def patch(self, path, **kwargs): + await self.ds.invoke_startup() + async with httpx.AsyncClient(app=self.app) as client: + return await client.patch(self._fix(path), **kwargs) - async def put(self, path, skip_permission_checks=False, **kwargs): - return await self._request( - "put", path, skip_permission_checks=skip_permission_checks, **kwargs - ) - - async def patch(self, path, skip_permission_checks=False, **kwargs): - return await self._request( - "patch", path, skip_permission_checks=skip_permission_checks, **kwargs - ) - - async def delete(self, path, skip_permission_checks=False, **kwargs): - return await self._request( - "delete", path, skip_permission_checks=skip_permission_checks, **kwargs - ) - - async def request(self, method, path, skip_permission_checks=False, **kwargs): - """Make an HTTP request with the specified 
method. - - Args: - method: HTTP method (e.g., "GET", "POST", "PUT") - path: The path to request - skip_permission_checks: If True, bypass all permission checks for this request - **kwargs: Additional arguments to pass to httpx - - Returns: - httpx.Response: The response from the request - """ - from datasette.permissions import SkipPermissions + async def delete(self, path, **kwargs): + await self.ds.invoke_startup() + async with httpx.AsyncClient(app=self.app) as client: + return await client.delete(self._fix(path), **kwargs) + async def request(self, method, path, **kwargs): + await self.ds.invoke_startup() avoid_path_rewrites = kwargs.pop("avoid_path_rewrites", None) - with _DatasetteClientContext(): - if skip_permission_checks: - with SkipPermissions(): - async with httpx.AsyncClient( - transport=httpx.ASGITransport(app=self.app), - cookies=kwargs.pop("cookies", None), - ) as client: - return await client.request( - method, self._fix(path, avoid_path_rewrites), **kwargs - ) - else: - async with httpx.AsyncClient( - transport=httpx.ASGITransport(app=self.app), - cookies=kwargs.pop("cookies", None), - ) as client: - return await client.request( - method, self._fix(path, avoid_path_rewrites), **kwargs - ) + async with httpx.AsyncClient(app=self.app) as client: + return await client.request( + method, self._fix(path, avoid_path_rewrites), **kwargs + ) diff --git a/datasette/cli.py b/datasette/cli.py index 21420491..6eb42712 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -4,17 +4,16 @@ import click from click import formatting from click.types import CompositeParamType from click_default_group import DefaultGroup -import functools import json import os import pathlib -from runpy import run_module import shutil from subprocess import call import sys -import textwrap +from runpy import run_module import webbrowser from .app import ( + OBSOLETE_SETTINGS, Datasette, DEFAULT_SETTINGS, SETTINGS, @@ -25,13 +24,11 @@ from .utils import ( LoadExtension, StartupError, check_connection, - deep_dict_update, find_spatialite, parse_metadata, ConnectionProblem, SpatialiteConnectionProblem, initial_path_for_datasette, - pairs_to_nested_config, temporary_docker_directory, value_as_boolean, SpatialiteNotFound, @@ -42,18 +39,6 @@ from .utils.sqlite import sqlite3 from .utils.testing import TestClient from .version import __version__ - -def run_sync(coro_func): - """Run an async callable to completion on a fresh event loop.""" - loop = asyncio.new_event_loop() - try: - asyncio.set_event_loop(loop) - return loop.run_until_complete(coro_func()) - finally: - asyncio.set_event_loop(None) - loop.close() - - # Use Rich for tracebacks if it is installed try: from rich.traceback import install @@ -63,65 +48,93 @@ except ImportError: pass +class Config(click.ParamType): + # This will be removed in Datasette 1.0 in favour of class Setting + name = "config" + + def convert(self, config, param, ctx): + if ":" not in config: + self.fail(f'"{config}" should be name:value', param, ctx) + return + name, value = config.split(":", 1) + if name not in DEFAULT_SETTINGS: + msg = ( + OBSOLETE_SETTINGS.get(name) + or f"{name} is not a valid option (--help-settings to see all)" + ) + self.fail( + msg, + param, + ctx, + ) + return + # Type checking + default = DEFAULT_SETTINGS[name] + if isinstance(default, bool): + try: + return name, value_as_boolean(value) + except ValueAsBooleanError: + self.fail(f'"{name}" should be on/off/true/false/1/0', param, ctx) + return + elif isinstance(default, int): + if not value.isdigit(): + 
self.fail(f'"{name}" should be an integer', param, ctx) + return + return name, int(value) + elif isinstance(default, str): + return name, value + else: + # Should never happen: + self.fail("Invalid option") + + class Setting(CompositeParamType): name = "setting" arity = 2 def convert(self, config, param, ctx): name, value = config - if name in DEFAULT_SETTINGS: - # For backwards compatibility with how this worked prior to - # Datasette 1.0, we turn bare setting names into setting.name - # Type checking for those older settings - default = DEFAULT_SETTINGS[name] - name = "settings.{}".format(name) - if isinstance(default, bool): - try: - return name, "true" if value_as_boolean(value) else "false" - except ValueAsBooleanError: - self.fail(f'"{name}" should be on/off/true/false/1/0', param, ctx) - elif isinstance(default, int): - if not value.isdigit(): - self.fail(f'"{name}" should be an integer', param, ctx) - return name, value - elif isinstance(default, str): - return name, value - else: - # Should never happen: - self.fail("Invalid option") - return name, value + if name not in DEFAULT_SETTINGS: + msg = ( + OBSOLETE_SETTINGS.get(name) + or f"{name} is not a valid option (--help-settings to see all)" + ) + self.fail( + msg, + param, + ctx, + ) + return + # Type checking + default = DEFAULT_SETTINGS[name] + if isinstance(default, bool): + try: + return name, value_as_boolean(value) + except ValueAsBooleanError: + self.fail(f'"{name}" should be on/off/true/false/1/0', param, ctx) + return + elif isinstance(default, int): + if not value.isdigit(): + self.fail(f'"{name}" should be an integer', param, ctx) + return + return name, int(value) + elif isinstance(default, str): + return name, value + else: + # Should never happen: + self.fail("Invalid option") def sqlite_extensions(fn): - fn = click.option( + return click.option( "sqlite_extensions", "--load-extension", type=LoadExtension(), - envvar="DATASETTE_LOAD_EXTENSION", + envvar="SQLITE_EXTENSIONS", multiple=True, help="Path to a SQLite extension to load, and optional entrypoint", )(fn) - # Wrap it in a custom error handler - @functools.wraps(fn) - def wrapped(*args, **kwargs): - try: - return fn(*args, **kwargs) - except AttributeError as e: - if "enable_load_extension" in str(e): - raise click.ClickException( - textwrap.dedent( - """ - Your Python installation does not have the ability to load SQLite extensions. - - More information: https://datasette.io/help/extensions - """ - ).strip() - ) - raise - - return wrapped - @click.group(cls=DefaultGroup, default="serve", default_if_no_args=True) @click.version_option(version=__version__) @@ -146,7 +159,9 @@ def inspect(files, inspect_file, sqlite_extensions): This can then be passed to "datasette --inspect-file" to speed up count operations against immutable database files. 
""" - inspect_data = run_sync(lambda: inspect_(files, sqlite_extensions)) + app = Datasette([], immutables=files, sqlite_extensions=sqlite_extensions) + loop = asyncio.get_event_loop() + inspect_data = loop.run_until_complete(inspect_(files, sqlite_extensions)) if inspect_file == "-": sys.stdout.write(json.dumps(inspect_data, indent=2)) else: @@ -158,6 +173,9 @@ async def inspect_(files, sqlite_extensions): app = Datasette([], immutables=files, sqlite_extensions=sqlite_extensions) data = {} for name, database in app.databases.items(): + if name == "_internal": + # Don't include the in-memory _internal database + continue counts = await database.table_counts(limit=3600 * 1000) data[name] = { "hash": database.hash, @@ -183,23 +201,15 @@ pm.hook.publish_subcommand(publish=publish) @cli.command() @click.option("--all", help="Include built-in default plugins", is_flag=True) -@click.option( - "--requirements", help="Output requirements.txt of installed plugins", is_flag=True -) @click.option( "--plugins-dir", type=click.Path(exists=True, file_okay=False, dir_okay=True), help="Path to directory containing custom plugins", ) -def plugins(all, requirements, plugins_dir): +def plugins(all, plugins_dir): """List currently installed plugins""" app = Datasette([], plugins_dir=plugins_dir) - if requirements: - for plugin in app._plugins(): - if plugin["version"]: - click.echo("{}=={}".format(plugin["name"], plugin["version"])) - else: - click.echo(json.dumps(app._plugins(all=all), indent=4)) + click.echo(json.dumps(app._plugins(all=all), indent=4)) @cli.command() @@ -309,32 +319,15 @@ def package( @cli.command() -@click.argument("packages", nargs=-1) +@click.argument("packages", nargs=-1, required=True) @click.option( "-U", "--upgrade", is_flag=True, help="Upgrade packages to latest version" ) -@click.option( - "-r", - "--requirement", - type=click.Path(exists=True), - help="Install from requirements file", -) -@click.option( - "-e", - "--editable", - help="Install a project in editable mode from this path", -) -def install(packages, upgrade, requirement, editable): +def install(packages, upgrade): """Install plugins and packages from PyPI into the same environment as Datasette""" - if not packages and not requirement and not editable: - raise click.UsageError("Please specify at least one package to install") args = ["pip", "install"] if upgrade: args += ["--upgrade"] - if editable: - args += ["--editable", editable] - if requirement: - args += ["-r", requirement] args += list(packages) sys.argv = args run_module("pip", run_name="__main__") @@ -415,17 +408,16 @@ def uninstall(packages, yes): ) @click.option("--memory", is_flag=True, help="Make /_memory database available") @click.option( - "-c", "--config", - type=click.File(mode="r"), - help="Path to JSON/YAML Datasette configuration file", + type=Config(), + help="Deprecated: set config option using configname:value. 
Use --setting instead.", + multiple=True, ) @click.option( - "-s", "--setting", "settings", type=Setting(), - help="nested.key, value setting to use in Datasette configuration", + help="Setting, see docs.datasette.io/en/stable/settings.html", multiple=True, ) @click.option( @@ -438,28 +430,10 @@ def uninstall(packages, yes): help="Output URL that sets a cookie authenticating the root user", is_flag=True, ) -@click.option( - "--default-deny", - help="Deny all permissions by default", - is_flag=True, -) @click.option( "--get", help="Run an HTTP GET request against this path, print results and exit", ) -@click.option( - "--headers", - is_flag=True, - help="Include HTTP headers in --get output", -) -@click.option( - "--token", - help="API token to send with --get requests", -) -@click.option( - "--actor", - help="Actor to use for --get requests (JSON string)", -) @click.option("--version-note", help="Additional note to show on /-/versions") @click.option("--help-settings", is_flag=True, help="Show available settings") @click.option("--pdb", is_flag=True, help="Launch debugger on any errors") @@ -488,17 +462,10 @@ def uninstall(packages, yes): @click.option( "--ssl-keyfile", help="SSL key file", - envvar="DATASETTE_SSL_KEYFILE", ) @click.option( "--ssl-certfile", help="SSL certificate file", - envvar="DATASETTE_SSL_CERTFILE", -) -@click.option( - "--internal", - type=click.Path(), - help="Path to a persistent Datasette internal SQLite database", ) def serve( files, @@ -519,11 +486,7 @@ def serve( settings, secret, root, - default_deny, get, - headers, - token, - actor, version_note, help_settings, pdb, @@ -533,7 +496,6 @@ def serve( nolock, ssl_keyfile, ssl_certfile, - internal, return_instance=False, ): """Serve up specified SQLite database files with a web UI""" @@ -554,8 +516,6 @@ def serve( reloader = hupper.start_reloader("datasette.cli.serve") if immutable: reloader.watch_files(immutable) - if config: - reloader.watch_files([config.name]) if metadata: reloader.watch_files([metadata.name]) @@ -568,55 +528,41 @@ def serve( if metadata: metadata_data = parse_metadata(metadata.read()) - config_data = None + combined_settings = {} if config: - config_data = parse_metadata(config.read()) - - config_data = config_data or {} - - # Merge in settings from -s/--setting - if settings: - settings_updates = pairs_to_nested_config(settings) - # Merge recursively, to avoid over-writing nested values - # https://github.com/simonw/datasette/issues/2389 - deep_dict_update(config_data, settings_updates) + click.echo( + "--config name:value will be deprecated in Datasette 1.0, use --setting name value instead", + err=True, + ) + combined_settings.update(config) + combined_settings.update(settings) kwargs = dict( immutables=immutable, cache_headers=not reload, cors=cors, inspect_data=inspect_data, - config=config_data, metadata=metadata_data, sqlite_extensions=sqlite_extensions, template_dir=template_dir, plugins_dir=plugins_dir, static_mounts=static, - settings=None, # These are passed in config= now + settings=combined_settings, memory=memory, secret=secret, version_note=version_note, pdb=pdb, crossdb=crossdb, nolock=nolock, - internal=internal, - default_deny=default_deny, ) - # Separate directories from files - directories = [f for f in files if os.path.isdir(f)] - file_paths = [f for f in files if not os.path.isdir(f)] - - # Handle config_dir - only one directory allowed - if len(directories) > 1: - raise click.ClickException( - "Cannot pass multiple directories. Pass a single directory as config_dir." 
- ) - elif len(directories) == 1: - kwargs["config_dir"] = pathlib.Path(directories[0]) + # if files is a single directory, use that as config_dir= + if 1 == len(files) and os.path.isdir(files[0]): + kwargs["config_dir"] = pathlib.Path(files[0]) + files = [] # Verify list of files, create if needed (and --create) - for file in file_paths: + for file in files: if not pathlib.Path(file).exists(): if create: sqlite3.connect(file).execute("vacuum") @@ -627,32 +573,8 @@ def serve( ) ) - # Check for duplicate files by resolving all paths to their absolute forms - # Collect all database files that will be loaded (explicit files + config_dir files) - all_db_files = [] - - # Add explicit files - for file in file_paths: - all_db_files.append((file, pathlib.Path(file).resolve())) - - # Add config_dir databases if config_dir is set - if "config_dir" in kwargs: - config_dir = kwargs["config_dir"] - for ext in ("db", "sqlite", "sqlite3"): - for db_file in config_dir.glob(f"*.{ext}"): - all_db_files.append((str(db_file), db_file.resolve())) - - # Check for duplicates - seen = {} - for original_path, resolved_path in all_db_files: - if resolved_path in seen: - raise click.ClickException( - f"Duplicate database file: '{original_path}' and '{seen[resolved_path]}' " - f"both refer to {resolved_path}" - ) - seen[resolved_path] = original_path - - files = file_paths + # De-duplicate files so 'datasette db.db db.db' only attaches one /db + files = list(dict.fromkeys(files)) try: ds = Datasette(files, **kwargs) @@ -666,38 +588,15 @@ def serve( return ds # Run the "startup" plugin hooks - run_sync(ds.invoke_startup) + asyncio.get_event_loop().run_until_complete(ds.invoke_startup()) # Run async soundness checks - but only if we're not under pytest - run_sync(lambda: check_databases(ds)) - - if headers and not get: - raise click.ClickException("--headers can only be used with --get") - - if token and not get: - raise click.ClickException("--token can only be used with --get") + asyncio.get_event_loop().run_until_complete(check_databases(ds)) if get: client = TestClient(ds) - request_headers = {} - if token: - request_headers["Authorization"] = "Bearer {}".format(token) - cookies = {} - if actor: - cookies["ds_actor"] = client.actor_cookie(json.loads(actor)) - response = client.get(get, headers=request_headers, cookies=cookies) - - if headers: - # Output HTTP status code, headers, two newlines, then the response body - click.echo(f"HTTP/1.1 {response.status}") - for key, value in response.headers.items(): - click.echo(f"{key}: {value}") - if response.text: - click.echo() - click.echo(response.text) - else: - click.echo(response.text) - + response = client.get(get) + click.echo(response.text) exit_code = 0 if response.status == 200 else 1 sys.exit(exit_code) return @@ -705,15 +604,16 @@ def serve( # Start the server url = None if root: - ds.root_enabled = True url = "http://{}:{}{}?token={}".format( host, port, ds.urls.path("-/auth-token"), ds._root_token ) - click.echo(url) + print(url) if open_browser: if url is None: # Figure out most convenient URL - to table, database or homepage - path = run_sync(lambda: initial_path_for_datasette(ds)) + path = asyncio.get_event_loop().run_until_complete( + initial_path_for_datasette(ds) + ) url = f"http://{host}:{port}{path}" webbrowser.open(url) uvicorn_kwargs = dict( @@ -728,131 +628,6 @@ def serve( uvicorn.run(ds.app(), **uvicorn_kwargs) -@cli.command() -@click.argument("id") -@click.option( - "--secret", - help="Secret used for signing the API tokens", - 
envvar="DATASETTE_SECRET", - required=True, -) -@click.option( - "-e", - "--expires-after", - help="Token should expire after this many seconds", - type=int, -) -@click.option( - "alls", - "-a", - "--all", - type=str, - metavar="ACTION", - multiple=True, - help="Restrict token to this action", -) -@click.option( - "databases", - "-d", - "--database", - type=(str, str), - metavar="DB ACTION", - multiple=True, - help="Restrict token to this action on this database", -) -@click.option( - "resources", - "-r", - "--resource", - type=(str, str, str), - metavar="DB RESOURCE ACTION", - multiple=True, - help="Restrict token to this action on this database resource (a table, SQL view or named query)", -) -@click.option( - "--debug", - help="Show decoded token", - is_flag=True, -) -@click.option( - "--plugins-dir", - type=click.Path(exists=True, file_okay=False, dir_okay=True), - help="Path to directory containing custom plugins", -) -def create_token( - id, secret, expires_after, alls, databases, resources, debug, plugins_dir -): - """ - Create a signed API token for the specified actor ID - - Example: - - datasette create-token root --secret mysecret - - To allow only "view-database-download" for all databases: - - \b - datasette create-token root --secret mysecret \\ - --all view-database-download - - To allow "create-table" against a specific database: - - \b - datasette create-token root --secret mysecret \\ - --database mydb create-table - - To allow "insert-row" against a specific table: - - \b - datasette create-token root --secret myscret \\ - --resource mydb mytable insert-row - - Restricted actions can be specified multiple times using - multiple --all, --database, and --resource options. - - Add --debug to see a decoded version of the token. - """ - ds = Datasette(secret=secret, plugins_dir=plugins_dir) - - # Run ds.invoke_startup() in an event loop - run_sync(ds.invoke_startup) - - # Warn about any unknown actions - actions = [] - actions.extend(alls) - actions.extend([p[1] for p in databases]) - actions.extend([p[2] for p in resources]) - for action in actions: - if not ds.actions.get(action): - click.secho( - f" Unknown permission: {action} ", - fg="red", - err=True, - ) - - restrict_database = {} - for database, action in databases: - restrict_database.setdefault(database, []).append(action) - restrict_resource = {} - for database, resource, action in resources: - restrict_resource.setdefault(database, {}).setdefault(resource, []).append( - action - ) - - token = ds.create_token( - id, - expires_after=expires_after, - restrict_all=alls, - restrict_database=restrict_database, - restrict_resource=restrict_resource, - ) - click.echo(token) - if debug: - encoded = token[len("dstok_") :] - click.echo("\nDecoded:\n") - click.echo(json.dumps(ds.unsign(encoded, namespace="token"), indent=2)) - - pm.hook.register_commands(cli=cli) diff --git a/datasette/database.py b/datasette/database.py index e5858128..d8043c24 100644 --- a/datasette/database.py +++ b/datasette/database.py @@ -3,7 +3,6 @@ from collections import namedtuple from pathlib import Path import janus import queue -import sqlite_utils import sys import threading import uuid @@ -15,13 +14,11 @@ from .utils import ( detect_spatialite, get_all_foreign_keys, get_outbound_foreign_keys, - md5_not_usedforsecurity, sqlite_timelimit, sqlite3, table_columns, table_column_details, ) -from .utils.sqlite import sqlite_version from .inspect import inspect_hash connections = threading.local() @@ -30,22 +27,10 @@ AttachedDatabase = 
namedtuple("AttachedDatabase", ("seq", "name", "file")) class Database: - # For table counts stop at this many rows: - count_limit = 10000 - _thread_local_id_counter = 1 - def __init__( - self, - ds, - path=None, - is_mutable=True, - is_memory=False, - memory_name=None, - mode=None, + self, ds, path=None, is_mutable=True, is_memory=False, memory_name=None ): self.name = None - self._thread_local_id = f"x{self._thread_local_id_counter}" - Database._thread_local_id_counter += 1 self.route = None self.ds = ds self.path = path @@ -64,7 +49,6 @@ class Database: self._write_connection = None # This is used to track all file connections so they can be closed self._all_file_connections = [] - self.mode = mode @property def cached_table_counts(self): @@ -78,12 +62,6 @@ class Database: } return self._cached_table_counts - @property - def color(self): - if self.hash: - return self.hash[:6] - return md5_not_usedforsecurity(self.name)[:6] - def suggest_name(self): if self.path: return Path(self.path).stem @@ -93,20 +71,18 @@ class Database: return "db" def connect(self, write=False): - extra_kwargs = {} - if write: - extra_kwargs["isolation_level"] = "IMMEDIATE" if self.memory_name: uri = "file:{}?mode=memory&cache=shared".format(self.memory_name) conn = sqlite3.connect( - uri, uri=True, check_same_thread=False, **extra_kwargs + uri, + uri=True, + check_same_thread=False, ) if not write: conn.execute("PRAGMA query_only=1") return conn if self.is_memory: return sqlite3.connect(":memory:", uri=True) - # mode=ro or immutable=1? if self.is_mutable: qs = "?mode=ro" @@ -117,10 +93,8 @@ class Database: assert not (write and not self.is_mutable) if write: qs = "" - if self.mode is not None: - qs = f"?mode={self.mode}" conn = sqlite3.connect( - f"file:{self.path}{qs}", uri=True, check_same_thread=False, **extra_kwargs + f"file:{self.path}{qs}", uri=True, check_same_thread=False ) self._all_file_connections.append(conn) return conn @@ -132,7 +106,8 @@ class Database: async def execute_write(self, sql, params=None, block=True): def _inner(conn): - return conn.execute(sql, params or []) + with conn: + return conn.execute(sql, params or []) with trace("sql", database=self.name, sql=sql.strip(), params=params): results = await self.execute_write_fn(_inner, block=block) @@ -140,12 +115,11 @@ class Database: async def execute_write_script(self, sql, block=True): def _inner(conn): - return conn.executescript(sql) + with conn: + return conn.executescript(sql) with trace("sql", database=self.name, sql=sql.strip(), executescript=True): - results = await self.execute_write_fn( - _inner, block=block, transaction=False - ) + results = await self.execute_write_fn(_inner, block=block) return results async def execute_write_many(self, sql, params_seq, block=True): @@ -158,7 +132,8 @@ class Database: count += 1 yield param - return conn.executemany(sql, count_params(params_seq)), count + with conn: + return conn.executemany(sql, count_params(params_seq)), count with trace( "sql", database=self.name, sql=sql.strip(), executemany=True @@ -167,60 +142,25 @@ class Database: kwargs["count"] = count return results - async def execute_isolated_fn(self, fn): - # Open a new connection just for the duration of this function - # blocking the write queue to avoid any writes occurring during it - if self.ds.executor is None: - # non-threaded mode - isolated_connection = self.connect(write=True) - try: - result = fn(isolated_connection) - finally: - isolated_connection.close() - try: - self._all_file_connections.remove(isolated_connection) - 
except ValueError: - # Was probably a memory connection - pass - return result - else: - # Threaded mode - send to write thread - return await self._send_to_write_thread(fn, isolated_connection=True) - - async def execute_write_fn(self, fn, block=True, transaction=True): + async def execute_write_fn(self, fn, block=True): if self.ds.executor is None: # non-threaded mode if self._write_connection is None: self._write_connection = self.connect(write=True) self.ds._prepare_connection(self._write_connection, self.name) - if transaction: - with self._write_connection: - return fn(self._write_connection) - else: - return fn(self._write_connection) - else: - return await self._send_to_write_thread( - fn, block=block, transaction=transaction - ) + return fn(self._write_connection) - async def _send_to_write_thread( - self, fn, block=True, isolated_connection=False, transaction=True - ): + # threaded mode + task_id = uuid.uuid5(uuid.NAMESPACE_DNS, "datasette.io") if self._write_queue is None: self._write_queue = queue.Queue() if self._write_thread is None: self._write_thread = threading.Thread( target=self._execute_writes, daemon=True ) - self._write_thread.name = "_execute_writes for database {}".format( - self.name - ) self._write_thread.start() - task_id = uuid.uuid5(uuid.NAMESPACE_DNS, "datasette.io") reply_queue = janus.Queue() - self._write_queue.put( - WriteTask(fn, task_id, reply_queue, isolated_connection, transaction) - ) + self._write_queue.put(WriteTask(fn, task_id, reply_queue)) if block: result = await reply_queue.async_q.get() if isinstance(result, Exception): @@ -245,32 +185,12 @@ class Database: if conn_exception is not None: result = conn_exception else: - if task.isolated_connection: - isolated_connection = self.connect(write=True) - try: - result = task.fn(isolated_connection) - except Exception as e: - sys.stderr.write("{}\n".format(e)) - sys.stderr.flush() - result = e - finally: - isolated_connection.close() - try: - self._all_file_connections.remove(isolated_connection) - except ValueError: - # Was probably a memory connection - pass - else: - try: - if task.transaction: - with conn: - result = task.fn(conn) - else: - result = task.fn(conn) - except Exception as e: - sys.stderr.write("{}\n".format(e)) - sys.stderr.flush() - result = e + try: + result = task.fn(conn) + except Exception as e: + sys.stderr.write("{}\n".format(e)) + sys.stderr.flush() + result = e task.reply_queue.sync_q.put(result) async def execute_fn(self, fn): @@ -283,11 +203,11 @@ class Database: # threaded mode def in_thread(): - conn = getattr(connections, self._thread_local_id, None) + conn = getattr(connections, self.name, None) if not conn: conn = self.connect() self.ds._prepare_connection(conn, self.name) - setattr(connections, self._thread_local_id, conn) + setattr(connections, self.name, conn) return fn(conn) return await asyncio.get_event_loop().run_in_executor( @@ -385,7 +305,7 @@ class Database: try: table_count = ( await self.execute( - f"select count(*) from (select * from [{table}] limit {self.count_limit + 1})", + f"select count(*) from [{table}]", custom_time_limit=limit, ) ).rows[0][0] @@ -410,12 +330,7 @@ class Database: # But SQLite prior to 3.16.0 doesn't support pragma functions results = await self.execute("PRAGMA database_list;") # {'seq': 0, 'name': 'main', 'file': ''} - return [ - AttachedDatabase(*row) - for row in results.rows - # Filter out the SQLite internal "temp" database, refs #2557 - if row["seq"] > 0 and row["name"] != "temp" - ] + return [AttachedDatabase(*row) for row in 
results.rows if row["seq"] > 0] async def table_exists(self, table): results = await self.execute( @@ -448,38 +363,12 @@ class Database: return await self.execute_fn(lambda conn: detect_fts(conn, table)) async def label_column_for_table(self, table): - explicit_label_column = (await self.ds.table_config(self.name, table)).get( + explicit_label_column = self.ds.table_metadata(self.name, table).get( "label_column" ) if explicit_label_column: return explicit_label_column - - def column_details(conn): - # Returns {column_name: (type, is_unique)} - db = sqlite_utils.Database(conn) - columns = db[table].columns_dict - indexes = db[table].indexes - details = {} - for name in columns: - is_unique = any( - index - for index in indexes - if index.columns == [name] and index.unique - ) - details[name] = (columns[name], is_unique) - return details - - column_details = await self.execute_fn(column_details) - # Is there just one unique column that's text? - unique_text_columns = [ - name - for name, (type_, is_unique) in column_details.items() - if is_unique and type_ is str - ] - if len(unique_text_columns) == 1: - return unique_text_columns[0] - - column_names = list(column_details.keys()) + column_names = await self.execute_fn(lambda conn: table_columns(conn, table)) # Is there a name or title column? name_or_title = [c for c in column_names if c.lower() in ("name", "title")] if name_or_title: @@ -489,7 +378,6 @@ class Database: column_names and len(column_names) == 2 and ("id" in column_names or "pk" in column_names) - and not set(column_names) == {"id", "pk"} ): return [c for c in column_names if c not in ("id", "pk")][0] # Couldn't find a label: @@ -501,107 +389,21 @@ class Database: ) async def hidden_table_names(self): - hidden_tables = [] - # Add any tables marked as hidden in config - db_config = self.ds.config.get("databases", {}).get(self.name, {}) - if "tables" in db_config: - hidden_tables += [ - t for t in db_config["tables"] if db_config["tables"][t].get("hidden") - ] - - if sqlite_version()[1] >= 37: - hidden_tables += [ - x[0] - for x in await self.execute( - """ - with shadow_tables as ( - select name - from pragma_table_list - where [type] = 'shadow' - order by name - ), - core_tables as ( - select name - from sqlite_master - WHERE name in ('sqlite_stat1', 'sqlite_stat2', 'sqlite_stat3', 'sqlite_stat4') - OR substr(name, 1, 1) == '_' - ), - combined as ( - select name from shadow_tables - union all - select name from core_tables - ) - select name from combined order by 1 + # Mark tables 'hidden' if they relate to FTS virtual tables + hidden_tables = [ + r[0] + for r in ( + await self.execute( """ + select name from sqlite_master + where rootpage = 0 + and ( + sql like '%VIRTUAL TABLE%USING FTS%' + ) or name in ('sqlite_stat1', 'sqlite_stat2', 'sqlite_stat3', 'sqlite_stat4') + """ ) - ] - else: - hidden_tables += [ - x[0] - for x in await self.execute( - """ - WITH base AS ( - SELECT name - FROM sqlite_master - WHERE name IN ('sqlite_stat1', 'sqlite_stat2', 'sqlite_stat3', 'sqlite_stat4') - OR substr(name, 1, 1) == '_' - ), - fts_suffixes AS ( - SELECT column1 AS suffix - FROM (VALUES ('_data'), ('_idx'), ('_docsize'), ('_content'), ('_config')) - ), - fts5_names AS ( - SELECT name - FROM sqlite_master - WHERE sql LIKE '%VIRTUAL TABLE%USING FTS%' - ), - fts5_shadow_tables AS ( - SELECT - printf('%s%s', fts5_names.name, fts_suffixes.suffix) AS name - FROM fts5_names - JOIN fts_suffixes - ), - fts3_suffixes AS ( - SELECT column1 AS suffix - FROM (VALUES ('_content'), ('_segdir'), 
('_segments'), ('_stat'), ('_docsize')) - ), - fts3_names AS ( - SELECT name - FROM sqlite_master - WHERE sql LIKE '%VIRTUAL TABLE%USING FTS3%' - OR sql LIKE '%VIRTUAL TABLE%USING FTS4%' - ), - fts3_shadow_tables AS ( - SELECT - printf('%s%s', fts3_names.name, fts3_suffixes.suffix) AS name - FROM fts3_names - JOIN fts3_suffixes - ), - final AS ( - SELECT name FROM base - UNION ALL - SELECT name FROM fts5_shadow_tables - UNION ALL - SELECT name FROM fts3_shadow_tables - ) - SELECT name FROM final ORDER BY 1 - """ - ) - ] - # Also hide any FTS tables that have a content= argument - hidden_tables += [ - x[0] - for x in await self.execute( - """ - SELECT name - FROM sqlite_master - WHERE sql LIKE '%VIRTUAL TABLE%' - AND sql LIKE '%USING FTS%' - AND sql LIKE '%content=%' - """ - ) + ).rows ] - has_spatialite = await self.execute_fn(detect_spatialite) if has_spatialite: # Also hide Spatialite internal tables @@ -630,6 +432,21 @@ class Database: ) ).rows ] + # Add any from metadata.json + db_metadata = self.ds.metadata(database=self.name) + if "tables" in db_metadata: + hidden_tables += [ + t + for t in db_metadata["tables"] + if db_metadata["tables"][t].get("hidden") + ] + # Also mark as hidden any tables which start with the name of a hidden table + # e.g. "searchable_fts" implies "searchable_fts_content" should be hidden + for table_name in await self.table_names(): + for hidden_table in hidden_tables[:]: + if table_name.startswith(hidden_table): + hidden_tables.append(table_name) + continue return hidden_tables @@ -681,14 +498,12 @@ class Database: class WriteTask: - __slots__ = ("fn", "task_id", "reply_queue", "isolated_connection", "transaction") + __slots__ = ("fn", "task_id", "reply_queue") - def __init__(self, fn, task_id, reply_queue, isolated_connection, transaction): + def __init__(self, fn, task_id, reply_queue): self.fn = fn self.task_id = task_id self.reply_queue = reply_queue - self.isolated_connection = isolated_connection - self.transaction = transaction class QueryInterrupted(Exception): @@ -697,9 +512,6 @@ class QueryInterrupted(Exception): self.sql = sql self.params = params - def __str__(self): - return "QueryInterrupted: {}".format(self.e) - class MultipleValues(Exception): pass @@ -727,9 +539,6 @@ class Results: else: raise MultipleValues - def dicts(self): - return [dict(row) for row in self.rows] - def __iter__(self): return iter(self.rows) diff --git a/datasette/default_actions.py b/datasette/default_actions.py deleted file mode 100644 index 87d98fac..00000000 --- a/datasette/default_actions.py +++ /dev/null @@ -1,101 +0,0 @@ -from datasette import hookimpl -from datasette.permissions import Action -from datasette.resources import ( - DatabaseResource, - TableResource, - QueryResource, -) - - -@hookimpl -def register_actions(): - """Register the core Datasette actions.""" - return ( - # Global actions (no resource_class) - Action( - name="view-instance", - abbr="vi", - description="View Datasette instance", - ), - Action( - name="permissions-debug", - abbr="pd", - description="Access permission debug tool", - ), - Action( - name="debug-menu", - abbr="dm", - description="View debug menu items", - ), - # Database-level actions (parent-level) - Action( - name="view-database", - abbr="vd", - description="View database", - resource_class=DatabaseResource, - ), - Action( - name="view-database-download", - abbr="vdd", - description="Download database file", - resource_class=DatabaseResource, - also_requires="view-database", - ), - Action( - name="execute-sql", - abbr="es", - 
description="Execute read-only SQL queries", - resource_class=DatabaseResource, - also_requires="view-database", - ), - Action( - name="create-table", - abbr="ct", - description="Create tables", - resource_class=DatabaseResource, - ), - # Table-level actions (child-level) - Action( - name="view-table", - abbr="vt", - description="View table", - resource_class=TableResource, - ), - Action( - name="insert-row", - abbr="ir", - description="Insert rows", - resource_class=TableResource, - ), - Action( - name="delete-row", - abbr="dr", - description="Delete rows", - resource_class=TableResource, - ), - Action( - name="update-row", - abbr="ur", - description="Update rows", - resource_class=TableResource, - ), - Action( - name="alter-table", - abbr="at", - description="Alter tables", - resource_class=TableResource, - ), - Action( - name="drop-table", - abbr="dt", - description="Drop tables", - resource_class=TableResource, - ), - # Query-level actions (child-level) - Action( - name="view-query", - abbr="vq", - description="View named query results", - resource_class=QueryResource, - ), - ) diff --git a/datasette/default_magic_parameters.py b/datasette/default_magic_parameters.py index 91c1c5aa..19382207 100644 --- a/datasette/default_magic_parameters.py +++ b/datasette/default_magic_parameters.py @@ -24,12 +24,9 @@ def now(key, request): if key == "epoch": return int(time.time()) elif key == "date_utc": - return datetime.datetime.now(datetime.timezone.utc).date().isoformat() + return datetime.datetime.utcnow().date().isoformat() elif key == "datetime_utc": - return ( - datetime.datetime.now(datetime.timezone.utc).strftime(r"%Y-%m-%dT%H:%M:%S") - + "Z" - ) + return datetime.datetime.utcnow().strftime(r"%Y-%m-%dT%H:%M:%S") + "Z" else: raise KeyError diff --git a/datasette/default_menu_links.py b/datasette/default_menu_links.py index 85032387..56f481ef 100644 --- a/datasette/default_menu_links.py +++ b/datasette/default_menu_links.py @@ -4,7 +4,7 @@ from datasette import hookimpl @hookimpl def menu_links(datasette, actor): async def inner(): - if not await datasette.allowed(action="debug-menu", actor=actor): + if not await datasette.permission_allowed(actor, "debug-menu"): return [] return [ @@ -17,6 +17,10 @@ def menu_links(datasette, actor): "href": datasette.urls.path("/-/versions"), "label": "Version info", }, + { + "href": datasette.urls.path("/-/metadata"), + "label": "Metadata", + }, { "href": datasette.urls.path("/-/settings"), "label": "Settings", diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py new file mode 100644 index 00000000..3c295470 --- /dev/null +++ b/datasette/default_permissions.py @@ -0,0 +1,218 @@ +from datasette import hookimpl +from datasette.utils import actor_matches_allow +import click +import itsdangerous +import json +import time + + +@hookimpl(tryfirst=True, specname="permission_allowed") +def permission_allowed_default(datasette, actor, action, resource): + async def inner(): + # id=root gets some special permissions: + if action in ( + "permissions-debug", + "debug-menu", + "insert-row", + "create-table", + "drop-table", + "delete-row", + "update-row", + ): + if actor and actor.get("id") == "root": + return True + + # Resolve metadata view permissions + if action in ( + "view-instance", + "view-database", + "view-table", + "view-query", + "execute-sql", + ): + result = await _resolve_metadata_view_permissions( + datasette, actor, action, resource + ) + if result is not None: + return result + + # Check custom permissions: blocks + return 
await _resolve_metadata_permissions_blocks( + datasette, actor, action, resource + ) + + return inner + + +async def _resolve_metadata_permissions_blocks(datasette, actor, action, resource): + # Check custom permissions: blocks - not yet implemented + return None + + +async def _resolve_metadata_view_permissions(datasette, actor, action, resource): + if action == "view-instance": + allow = datasette.metadata("allow") + if allow is not None: + return actor_matches_allow(actor, allow) + elif action == "view-database": + if resource == "_internal" and (actor is None or actor.get("id") != "root"): + return False + database_allow = datasette.metadata("allow", database=resource) + if database_allow is None: + return None + return actor_matches_allow(actor, database_allow) + elif action == "view-table": + database, table = resource + tables = datasette.metadata("tables", database=database) or {} + table_allow = (tables.get(table) or {}).get("allow") + if table_allow is None: + return None + return actor_matches_allow(actor, table_allow) + elif action == "view-query": + # Check if this query has a "allow" block in metadata + database, query_name = resource + query = await datasette.get_canned_query(database, query_name, actor) + assert query is not None + allow = query.get("allow") + if allow is None: + return None + return actor_matches_allow(actor, allow) + elif action == "execute-sql": + # Use allow_sql block from database block, or from top-level + database_allow_sql = datasette.metadata("allow_sql", database=resource) + if database_allow_sql is None: + database_allow_sql = datasette.metadata("allow_sql") + if database_allow_sql is None: + return None + return actor_matches_allow(actor, database_allow_sql) + + +@hookimpl(specname="permission_allowed") +def permission_allowed_actor_restrictions(actor, action, resource): + if actor is None: + return None + if "_r" not in actor: + # No restrictions, so we have no opinion + return None + _r = actor.get("_r") + action_initials = "".join([word[0] for word in action.split("-")]) + # If _r is defined then we use those to further restrict the actor + # Crucially, we only use this to say NO (return False) - we never + # use it to return YES (True) because that might over-ride other + # restrictions placed on this actor + all_allowed = _r.get("a") + if all_allowed is not None: + assert isinstance(all_allowed, list) + if action_initials in all_allowed: + return None + # How about for the current database? + if action in ("view-database", "view-database-download", "execute-sql"): + database_allowed = _r.get("d", {}).get(resource) + if database_allowed is not None: + assert isinstance(database_allowed, list) + if action_initials in database_allowed: + return None + # Or the current table? That's any time the resource is (database, table) + if resource is not None and not isinstance(resource, str) and len(resource) == 2: + database, table = resource + table_allowed = _r.get("t", {}).get(database, {}).get(table) + # TODO: What should this do for canned queries? 
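
The "_r" allowlist consumed by this hook is easiest to read as an example. A minimal sketch (illustrative only, not part of this patch; the actor id, database and table names are made up) of an actor carrying restrictions, using the abbreviated action initials the code above derives from names like "view-table":

actor = {
    "id": "alice",
    "_r": {
        "a": ["vi"],                                # allowed for the whole instance
        "d": {"fixtures": ["vd", "es"]},            # allowed on the fixtures database
        "t": {"fixtures": {"searchable": ["vt"]}},  # allowed on one table
    },
}
# Actions and resources not covered by these lists are refused (the hook
# returns False); covered ones fall through (return None) so that other
# permission hooks can still decide.
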
+ if table_allowed is not None: + assert isinstance(table_allowed, list) + if action_initials in table_allowed: + return None + # This action is not specifically allowed, so reject it + return False + + +@hookimpl +def actor_from_request(datasette, request): + prefix = "dstok_" + if not datasette.setting("allow_signed_tokens"): + return None + max_signed_tokens_ttl = datasette.setting("max_signed_tokens_ttl") + authorization = request.headers.get("authorization") + if not authorization: + return None + if not authorization.startswith("Bearer "): + return None + token = authorization[len("Bearer ") :] + if not token.startswith(prefix): + return None + token = token[len(prefix) :] + try: + decoded = datasette.unsign(token, namespace="token") + except itsdangerous.BadSignature: + return None + if "t" not in decoded: + # Missing timestamp + return None + created = decoded["t"] + if not isinstance(created, int): + # Invalid timestamp + return None + duration = decoded.get("d") + if duration is not None and not isinstance(duration, int): + # Invalid duration + return None + if (duration is None and max_signed_tokens_ttl) or ( + duration is not None + and max_signed_tokens_ttl + and duration > max_signed_tokens_ttl + ): + duration = max_signed_tokens_ttl + if duration: + if time.time() - created > duration: + # Expired + return None + actor = {"id": decoded["a"], "token": "dstok"} + if "_r" in decoded: + actor["_r"] = decoded["_r"] + if duration: + actor["token_expires"] = created + duration + return actor + + +@hookimpl +def register_commands(cli): + from datasette.app import Datasette + + @cli.command() + @click.argument("id") + @click.option( + "--secret", + help="Secret used for signing the API tokens", + envvar="DATASETTE_SECRET", + required=True, + ) + @click.option( + "-e", + "--expires-after", + help="Token should expire after this many seconds", + type=int, + ) + @click.option( + "--debug", + help="Show decoded token", + is_flag=True, + ) + def create_token(id, secret, expires_after, debug): + "Create a signed API token for the specified actor ID" + ds = Datasette(secret=secret) + bits = {"a": id, "token": "dstok", "t": int(time.time())} + if expires_after: + bits["d"] = expires_after + token = ds.sign(bits, namespace="token") + click.echo("dstok_{}".format(token)) + if debug: + click.echo("\nDecoded:\n") + click.echo(json.dumps(ds.unsign(token, namespace="token"), indent=2)) + + +@hookimpl +def skip_csrf(scope): + # Skip CSRF check for requests with content-type: application/json + if scope["type"] == "http": + headers = scope.get("headers") or {} + if dict(headers).get(b"content-type") == b"application/json": + return True diff --git a/datasette/default_permissions/__init__.py b/datasette/default_permissions/__init__.py deleted file mode 100644 index 4c82d705..00000000 --- a/datasette/default_permissions/__init__.py +++ /dev/null @@ -1,59 +0,0 @@ -""" -Default permission implementations for Datasette. - -This module provides the built-in permission checking logic through implementations -of the permission_resources_sql hook. The hooks are organized by their purpose: - -1. Actor Restrictions - Enforces _r allowlists embedded in actor tokens -2. Root User - Grants full access when --root flag is used -3. Config Rules - Applies permissions from datasette.yaml -4. Default Settings - Enforces default_allow_sql and default view permissions - -IMPORTANT: These hooks return PermissionSQL objects that are combined using SQL -UNION/INTERSECT operations. 
The order of evaluation is: - - restriction_sql fields are INTERSECTed (all must match) - - Regular sql fields are UNIONed and evaluated with cascading priority -""" - -from __future__ import annotations - -from typing import TYPE_CHECKING, Optional - -if TYPE_CHECKING: - from datasette.app import Datasette - -from datasette import hookimpl - -# Re-export all hooks and public utilities -from .restrictions import ( - actor_restrictions_sql, - restrictions_allow_action, - ActorRestrictions, -) -from .root import root_user_permissions_sql -from .config import config_permissions_sql -from .defaults import ( - default_allow_sql_check, - default_action_permissions_sql, - DEFAULT_ALLOW_ACTIONS, -) -from .tokens import actor_from_signed_api_token - - -@hookimpl -def skip_csrf(scope) -> Optional[bool]: - """Skip CSRF check for JSON content-type requests.""" - if scope["type"] == "http": - headers = scope.get("headers") or {} - if dict(headers).get(b"content-type") == b"application/json": - return True - return None - - -@hookimpl -def canned_queries(datasette: "Datasette", database: str, actor) -> dict: - """Return canned queries defined in datasette.yaml configuration.""" - queries = ( - ((datasette.config or {}).get("databases") or {}).get(database) or {} - ).get("queries") or {} - return queries diff --git a/datasette/default_permissions/config.py b/datasette/default_permissions/config.py deleted file mode 100644 index aab87c1c..00000000 --- a/datasette/default_permissions/config.py +++ /dev/null @@ -1,442 +0,0 @@ -""" -Config-based permission handling for Datasette. - -Applies permission rules from datasette.yaml configuration. -""" - -from __future__ import annotations - -from typing import TYPE_CHECKING, Any, List, Optional, Set, Tuple - -if TYPE_CHECKING: - from datasette.app import Datasette - -from datasette import hookimpl -from datasette.permissions import PermissionSQL -from datasette.utils import actor_matches_allow - -from .helpers import PermissionRowCollector, get_action_name_variants - - -class ConfigPermissionProcessor: - """ - Processes permission rules from datasette.yaml configuration. 
- - Configuration structure: - - permissions: # Root-level permissions block - view-instance: - id: admin - - databases: - mydb: - permissions: # Database-level permissions - view-database: - id: admin - allow: # Database-level allow block (for view-*) - id: viewer - allow_sql: # execute-sql allow block - id: analyst - tables: - users: - permissions: # Table-level permissions - view-table: - id: admin - allow: # Table-level allow block - id: viewer - queries: - my_query: - permissions: # Query-level permissions - view-query: - id: admin - allow: # Query-level allow block - id: viewer - """ - - def __init__( - self, - datasette: "Datasette", - actor: Optional[dict], - action: str, - ): - self.datasette = datasette - self.actor = actor - self.action = action - self.config = datasette.config or {} - self.collector = PermissionRowCollector(prefix="cfg") - - # Pre-compute action variants - self.action_checks = get_action_name_variants(datasette, action) - self.action_obj = datasette.actions.get(action) - - # Parse restrictions if present - self.has_restrictions = actor and "_r" in actor if actor else False - self.restrictions = actor.get("_r", {}) if actor else {} - - # Pre-compute restriction info for efficiency - self.restricted_databases: Set[str] = set() - self.restricted_tables: Set[Tuple[str, str]] = set() - - if self.has_restrictions: - self.restricted_databases = { - db_name - for db_name, db_actions in (self.restrictions.get("d") or {}).items() - if self.action_checks.intersection(db_actions) - } - self.restricted_tables = { - (db_name, table_name) - for db_name, tables in (self.restrictions.get("r") or {}).items() - for table_name, table_actions in tables.items() - if self.action_checks.intersection(table_actions) - } - # Tables implicitly reference their parent databases - self.restricted_databases.update(db for db, _ in self.restricted_tables) - - def evaluate_allow_block(self, allow_block: Any) -> Optional[bool]: - """Evaluate an allow block against the current actor.""" - if allow_block is None: - return None - return actor_matches_allow(self.actor, allow_block) - - def is_in_restriction_allowlist( - self, - parent: Optional[str], - child: Optional[str], - ) -> bool: - """Check if resource is allowed by actor restrictions.""" - if not self.has_restrictions: - return True # No restrictions, all resources allowed - - # Check global allowlist - if self.action_checks.intersection(self.restrictions.get("a", [])): - return True - - # Check database-level allowlist - if parent and self.action_checks.intersection( - self.restrictions.get("d", {}).get(parent, []) - ): - return True - - # Check table-level allowlist - if parent: - table_restrictions = (self.restrictions.get("r", {}) or {}).get(parent, {}) - if child: - table_actions = table_restrictions.get(child, []) - if self.action_checks.intersection(table_actions): - return True - else: - # Parent query should proceed if any child in this database is allowlisted - for table_actions in table_restrictions.values(): - if self.action_checks.intersection(table_actions): - return True - - # Parent/child both None: include if any restrictions exist for this action - if parent is None and child is None: - if self.action_checks.intersection(self.restrictions.get("a", [])): - return True - if self.restricted_databases: - return True - if self.restricted_tables: - return True - - return False - - def add_permissions_rule( - self, - parent: Optional[str], - child: Optional[str], - permissions_block: Optional[dict], - scope_desc: str, - ) -> None: 
- """Add a rule from a permissions:{action} block.""" - if permissions_block is None: - return - - action_allow_block = permissions_block.get(self.action) - result = self.evaluate_allow_block(action_allow_block) - - self.collector.add( - parent=parent, - child=child, - allow=result, - reason=f"config {'allow' if result else 'deny'} {scope_desc}", - if_not_none=True, - ) - - def add_allow_block_rule( - self, - parent: Optional[str], - child: Optional[str], - allow_block: Any, - scope_desc: str, - ) -> None: - """ - Add rules from an allow:{} block. - - For allow blocks, if the block exists but doesn't match the actor, - this is treated as a deny. We also handle the restriction-gate logic. - """ - if allow_block is None: - return - - # Skip if resource is not in restriction allowlist - if not self.is_in_restriction_allowlist(parent, child): - return - - result = self.evaluate_allow_block(allow_block) - bool_result = bool(result) - - self.collector.add( - parent, - child, - bool_result, - f"config {'allow' if result else 'deny'} {scope_desc}", - ) - - # Handle restriction-gate: add explicit denies for restricted resources - self._add_restriction_gate_denies(parent, child, bool_result, scope_desc) - - def _add_restriction_gate_denies( - self, - parent: Optional[str], - child: Optional[str], - is_allowed: bool, - scope_desc: str, - ) -> None: - """ - When a config rule denies at a higher level, add explicit denies - for restricted resources to prevent child-level allows from - incorrectly granting access. - """ - if is_allowed or child is not None or not self.has_restrictions: - return - - if not self.action_obj: - return - - reason = f"config deny {scope_desc} (restriction gate)" - - if parent is None: - # Root-level deny: add denies for all restricted resources - if self.action_obj.takes_parent: - for db_name in self.restricted_databases: - self.collector.add(db_name, None, False, reason) - if self.action_obj.takes_child: - for db_name, table_name in self.restricted_tables: - self.collector.add(db_name, table_name, False, reason) - else: - # Database-level deny: add denies for tables in that database - if self.action_obj.takes_child: - for db_name, table_name in self.restricted_tables: - if db_name == parent: - self.collector.add(db_name, table_name, False, reason) - - def process(self) -> Optional[PermissionSQL]: - """Process all config rules and return combined PermissionSQL.""" - self._process_root_permissions() - self._process_databases() - self._process_root_allow_blocks() - - return self.collector.to_permission_sql() - - def _process_root_permissions(self) -> None: - """Process root-level permissions block.""" - root_perms = self.config.get("permissions") or {} - self.add_permissions_rule( - None, - None, - root_perms, - f"permissions for {self.action}", - ) - - def _process_databases(self) -> None: - """Process database-level and nested configurations.""" - databases = self.config.get("databases") or {} - - for db_name, db_config in databases.items(): - self._process_database(db_name, db_config or {}) - - def _process_database(self, db_name: str, db_config: dict) -> None: - """Process a single database's configuration.""" - # Database-level permissions block - db_perms = db_config.get("permissions") or {} - self.add_permissions_rule( - db_name, - None, - db_perms, - f"permissions for {self.action} on {db_name}", - ) - - # Process tables - for table_name, table_config in (db_config.get("tables") or {}).items(): - self._process_table(db_name, table_name, table_config or {}) - - # Process 
queries - for query_name, query_config in (db_config.get("queries") or {}).items(): - self._process_query(db_name, query_name, query_config) - - # Database-level allow blocks - self._process_database_allow_blocks(db_name, db_config) - - def _process_table( - self, - db_name: str, - table_name: str, - table_config: dict, - ) -> None: - """Process a single table's configuration.""" - # Table-level permissions block - table_perms = table_config.get("permissions") or {} - self.add_permissions_rule( - db_name, - table_name, - table_perms, - f"permissions for {self.action} on {db_name}/{table_name}", - ) - - # Table-level allow block (for view-table) - if self.action == "view-table": - self.add_allow_block_rule( - db_name, - table_name, - table_config.get("allow"), - f"allow for {self.action} on {db_name}/{table_name}", - ) - - def _process_query( - self, - db_name: str, - query_name: str, - query_config: Any, - ) -> None: - """Process a single query's configuration.""" - # Query config can be a string (just SQL) or dict - if not isinstance(query_config, dict): - return - - # Query-level permissions block - query_perms = query_config.get("permissions") or {} - self.add_permissions_rule( - db_name, - query_name, - query_perms, - f"permissions for {self.action} on {db_name}/{query_name}", - ) - - # Query-level allow block (for view-query) - if self.action == "view-query": - self.add_allow_block_rule( - db_name, - query_name, - query_config.get("allow"), - f"allow for {self.action} on {db_name}/{query_name}", - ) - - def _process_database_allow_blocks( - self, - db_name: str, - db_config: dict, - ) -> None: - """Process database-level allow/allow_sql blocks.""" - # view-database allow block - if self.action == "view-database": - self.add_allow_block_rule( - db_name, - None, - db_config.get("allow"), - f"allow for {self.action} on {db_name}", - ) - - # execute-sql allow_sql block - if self.action == "execute-sql": - self.add_allow_block_rule( - db_name, - None, - db_config.get("allow_sql"), - f"allow_sql for {db_name}", - ) - - # view-table uses database-level allow for inheritance - if self.action == "view-table": - self.add_allow_block_rule( - db_name, - None, - db_config.get("allow"), - f"allow for {self.action} on {db_name}", - ) - - # view-query uses database-level allow for inheritance - if self.action == "view-query": - self.add_allow_block_rule( - db_name, - None, - db_config.get("allow"), - f"allow for {self.action} on {db_name}", - ) - - def _process_root_allow_blocks(self) -> None: - """Process root-level allow/allow_sql blocks.""" - root_allow = self.config.get("allow") - - if self.action == "view-instance": - self.add_allow_block_rule( - None, - None, - root_allow, - "allow for view-instance", - ) - - if self.action == "view-database": - self.add_allow_block_rule( - None, - None, - root_allow, - "allow for view-database", - ) - - if self.action == "view-table": - self.add_allow_block_rule( - None, - None, - root_allow, - "allow for view-table", - ) - - if self.action == "view-query": - self.add_allow_block_rule( - None, - None, - root_allow, - "allow for view-query", - ) - - if self.action == "execute-sql": - self.add_allow_block_rule( - None, - None, - self.config.get("allow_sql"), - "allow_sql", - ) - - -@hookimpl(specname="permission_resources_sql") -async def config_permissions_sql( - datasette: "Datasette", - actor: Optional[dict], - action: str, -) -> Optional[List[PermissionSQL]]: - """ - Apply permission rules from datasette.yaml configuration. 
- - This processes: - - permissions: blocks at root, database, table, and query levels - - allow: blocks for view-* actions - - allow_sql: blocks for execute-sql action - """ - processor = ConfigPermissionProcessor(datasette, actor, action) - result = processor.process() - - if result is None: - return [] - - return [result] diff --git a/datasette/default_permissions/defaults.py b/datasette/default_permissions/defaults.py deleted file mode 100644 index f5a6a270..00000000 --- a/datasette/default_permissions/defaults.py +++ /dev/null @@ -1,70 +0,0 @@ -""" -Default permission settings for Datasette. - -Provides default allow rules for standard view/execute actions. -""" - -from __future__ import annotations - -from typing import TYPE_CHECKING, Optional - -if TYPE_CHECKING: - from datasette.app import Datasette - -from datasette import hookimpl -from datasette.permissions import PermissionSQL - - -# Actions that are allowed by default (unless --default-deny is used) -DEFAULT_ALLOW_ACTIONS = frozenset( - { - "view-instance", - "view-database", - "view-database-download", - "view-table", - "view-query", - "execute-sql", - } -) - - -@hookimpl(specname="permission_resources_sql") -async def default_allow_sql_check( - datasette: "Datasette", - actor: Optional[dict], - action: str, -) -> Optional[PermissionSQL]: - """ - Enforce the default_allow_sql setting. - - When default_allow_sql is false (the default), execute-sql is denied - unless explicitly allowed by config or other rules. - """ - if action == "execute-sql": - if not datasette.setting("default_allow_sql"): - return PermissionSQL.deny(reason="default_allow_sql is false") - - return None - - -@hookimpl(specname="permission_resources_sql") -async def default_action_permissions_sql( - datasette: "Datasette", - actor: Optional[dict], - action: str, -) -> Optional[PermissionSQL]: - """ - Provide default allow rules for standard view/execute actions. - - These defaults are skipped when datasette is started with --default-deny. - The restriction_sql mechanism (from actor_restrictions_sql) will still - filter these results if the actor has restrictions. - """ - if datasette.default_deny: - return None - - if action in DEFAULT_ALLOW_ACTIONS: - reason = f"default allow for {action}".replace("'", "''") - return PermissionSQL.allow(reason=reason) - - return None diff --git a/datasette/default_permissions/helpers.py b/datasette/default_permissions/helpers.py deleted file mode 100644 index 47e03569..00000000 --- a/datasette/default_permissions/helpers.py +++ /dev/null @@ -1,85 +0,0 @@ -""" -Shared helper utilities for default permission implementations. -""" - -from __future__ import annotations - -from dataclasses import dataclass -from typing import TYPE_CHECKING, List, Optional, Set - -if TYPE_CHECKING: - from datasette.app import Datasette - -from datasette.permissions import PermissionSQL - - -def get_action_name_variants(datasette: "Datasette", action: str) -> Set[str]: - """ - Get all name variants for an action (full name and abbreviation). 
- - Example: - get_action_name_variants(ds, "view-table") -> {"view-table", "vt"} - """ - variants = {action} - action_obj = datasette.actions.get(action) - if action_obj and action_obj.abbr: - variants.add(action_obj.abbr) - return variants - - -def action_in_list(datasette: "Datasette", action: str, action_list: list) -> bool: - """Check if an action (or its abbreviation) is in a list.""" - return bool(get_action_name_variants(datasette, action).intersection(action_list)) - - -@dataclass -class PermissionRow: - """A single permission rule row.""" - - parent: Optional[str] - child: Optional[str] - allow: bool - reason: str - - -class PermissionRowCollector: - """Collects permission rows and converts them to PermissionSQL.""" - - def __init__(self, prefix: str = "row"): - self.rows: List[PermissionRow] = [] - self.prefix = prefix - - def add( - self, - parent: Optional[str], - child: Optional[str], - allow: Optional[bool], - reason: str, - if_not_none: bool = False, - ) -> None: - """Add a permission row. If if_not_none=True, only add if allow is not None.""" - if if_not_none and allow is None: - return - self.rows.append(PermissionRow(parent, child, allow, reason)) - - def to_permission_sql(self) -> Optional[PermissionSQL]: - """Convert collected rows to a PermissionSQL object.""" - if not self.rows: - return None - - parts = [] - params = {} - - for idx, row in enumerate(self.rows): - key = f"{self.prefix}_{idx}" - parts.append( - f"SELECT :{key}_parent AS parent, :{key}_child AS child, " - f":{key}_allow AS allow, :{key}_reason AS reason" - ) - params[f"{key}_parent"] = row.parent - params[f"{key}_child"] = row.child - params[f"{key}_allow"] = 1 if row.allow else 0 - params[f"{key}_reason"] = row.reason - - sql = "\nUNION ALL\n".join(parts) - return PermissionSQL(sql=sql, params=params) diff --git a/datasette/default_permissions/restrictions.py b/datasette/default_permissions/restrictions.py deleted file mode 100644 index a22cd7e5..00000000 --- a/datasette/default_permissions/restrictions.py +++ /dev/null @@ -1,195 +0,0 @@ -""" -Actor restriction handling for Datasette permissions. - -This module handles the _r (restrictions) key in actor dictionaries, which -contains allowlists of resources the actor can access. -""" - -from __future__ import annotations - -from dataclasses import dataclass -from typing import TYPE_CHECKING, List, Optional, Set, Tuple - -if TYPE_CHECKING: - from datasette.app import Datasette - -from datasette import hookimpl -from datasette.permissions import PermissionSQL - -from .helpers import action_in_list, get_action_name_variants - - -@dataclass -class ActorRestrictions: - """Parsed actor restrictions from the _r key.""" - - global_actions: List[str] # _r.a - globally allowed actions - database_actions: dict # _r.d - {db_name: [actions]} - table_actions: dict # _r.r - {db_name: {table: [actions]}} - - @classmethod - def from_actor(cls, actor: Optional[dict]) -> Optional["ActorRestrictions"]: - """Parse restrictions from actor dict. 
Returns None if no restrictions.""" - if not actor: - return None - assert isinstance(actor, dict), "actor must be a dictionary" - - restrictions = actor.get("_r") - if restrictions is None: - return None - - return cls( - global_actions=restrictions.get("a", []), - database_actions=restrictions.get("d", {}), - table_actions=restrictions.get("r", {}), - ) - - def is_action_globally_allowed(self, datasette: "Datasette", action: str) -> bool: - """Check if action is in the global allowlist.""" - return action_in_list(datasette, action, self.global_actions) - - def get_allowed_databases(self, datasette: "Datasette", action: str) -> Set[str]: - """Get database names where this action is allowed.""" - allowed = set() - for db_name, db_actions in self.database_actions.items(): - if action_in_list(datasette, action, db_actions): - allowed.add(db_name) - return allowed - - def get_allowed_tables( - self, datasette: "Datasette", action: str - ) -> Set[Tuple[str, str]]: - """Get (database, table) pairs where this action is allowed.""" - allowed = set() - for db_name, tables in self.table_actions.items(): - for table_name, table_actions in tables.items(): - if action_in_list(datasette, action, table_actions): - allowed.add((db_name, table_name)) - return allowed - - -@hookimpl(specname="permission_resources_sql") -async def actor_restrictions_sql( - datasette: "Datasette", - actor: Optional[dict], - action: str, -) -> Optional[List[PermissionSQL]]: - """ - Handle actor restriction-based permission rules. - - When an actor has an "_r" key, it contains an allowlist of resources they - can access. This function returns restriction_sql that filters the final - results to only include resources in that allowlist. - - The _r structure: - { - "a": ["vi", "pd"], # Global actions allowed - "d": {"mydb": ["vt", "es"]}, # Database-level actions - "r": {"mydb": {"users": ["vt"]}} # Table-level actions - } - """ - if not actor: - return None - - restrictions = ActorRestrictions.from_actor(actor) - - if restrictions is None: - # No restrictions - all resources allowed - return [] - - # If globally allowed, no filtering needed - if restrictions.is_action_globally_allowed(datasette, action): - return [] - - # Build restriction SQL - allowed_dbs = restrictions.get_allowed_databases(datasette, action) - allowed_tables = restrictions.get_allowed_tables(datasette, action) - - # If nothing is allowed for this action, return empty-set restriction - if not allowed_dbs and not allowed_tables: - return [ - PermissionSQL( - params={"deny": f"actor restrictions: {action} not in allowlist"}, - restriction_sql="SELECT NULL AS parent, NULL AS child WHERE 0", - ) - ] - - # Build UNION of allowed resources - selects = [] - params = {} - counter = 0 - - # Database-level entries (parent, NULL) - allows all children - for db_name in allowed_dbs: - key = f"restr_{counter}" - counter += 1 - selects.append(f"SELECT :{key}_parent AS parent, NULL AS child") - params[f"{key}_parent"] = db_name - - # Table-level entries (parent, child) - for db_name, table_name in allowed_tables: - key = f"restr_{counter}" - counter += 1 - selects.append(f"SELECT :{key}_parent AS parent, :{key}_child AS child") - params[f"{key}_parent"] = db_name - params[f"{key}_child"] = table_name - - restriction_sql = "\nUNION ALL\n".join(selects) - - return [PermissionSQL(params=params, restriction_sql=restriction_sql)] - - -def restrictions_allow_action( - datasette: "Datasette", - restrictions: dict, - action: str, - resource: Optional[str | Tuple[str, str]], -) -> 
bool: - """ - Check if restrictions allow the requested action on the requested resource. - - This is a synchronous utility function for use by other code that needs - to quickly check restriction allowlists. - - Args: - datasette: The Datasette instance - restrictions: The _r dict from an actor - action: The action name to check - resource: None for global, str for database, (db, table) tuple for table - - Returns: - True if allowed, False if denied - """ - # Does this action have an abbreviation? - to_check = get_action_name_variants(datasette, action) - - # Check global level (any resource) - all_allowed = restrictions.get("a") - if all_allowed is not None: - assert isinstance(all_allowed, list) - if to_check.intersection(all_allowed): - return True - - # Check database level - if resource: - if isinstance(resource, str): - database_name = resource - else: - database_name = resource[0] - database_allowed = restrictions.get("d", {}).get(database_name) - if database_allowed is not None: - assert isinstance(database_allowed, list) - if to_check.intersection(database_allowed): - return True - - # Check table/resource level - if resource is not None and not isinstance(resource, str) and len(resource) == 2: - database, table = resource - table_allowed = restrictions.get("r", {}).get(database, {}).get(table) - if table_allowed is not None: - assert isinstance(table_allowed, list) - if to_check.intersection(table_allowed): - return True - - # This action is not explicitly allowed, so reject it - return False diff --git a/datasette/default_permissions/root.py b/datasette/default_permissions/root.py deleted file mode 100644 index 4931f7ff..00000000 --- a/datasette/default_permissions/root.py +++ /dev/null @@ -1,29 +0,0 @@ -""" -Root user permission handling for Datasette. - -Grants full permissions to the root user when --root flag is used. -""" - -from __future__ import annotations - -from typing import TYPE_CHECKING, Optional - -if TYPE_CHECKING: - from datasette.app import Datasette - -from datasette import hookimpl -from datasette.permissions import PermissionSQL - - -@hookimpl(specname="permission_resources_sql") -async def root_user_permissions_sql( - datasette: "Datasette", - actor: Optional[dict], -) -> Optional[PermissionSQL]: - """ - Grant root user full permissions when --root flag is used. - """ - if not datasette.root_enabled: - return None - if actor is not None and actor.get("id") == "root": - return PermissionSQL.allow(reason="root user") diff --git a/datasette/default_permissions/tokens.py b/datasette/default_permissions/tokens.py deleted file mode 100644 index 474b0c23..00000000 --- a/datasette/default_permissions/tokens.py +++ /dev/null @@ -1,95 +0,0 @@ -""" -Token authentication for Datasette. - -Handles signed API tokens (dstok_ prefix). -""" - -from __future__ import annotations - -import time -from typing import TYPE_CHECKING, Optional - -if TYPE_CHECKING: - from datasette.app import Datasette - -import itsdangerous - -from datasette import hookimpl - - -@hookimpl(specname="actor_from_request") -def actor_from_signed_api_token(datasette: "Datasette", request) -> Optional[dict]: - """ - Authenticate requests using signed API tokens (dstok_ prefix). 
- - Token structure (signed JSON): - { - "a": "actor_id", # Actor ID - "t": 1234567890, # Timestamp (Unix epoch) - "d": 3600, # Optional: Duration in seconds - "_r": {...} # Optional: Restrictions - } - """ - prefix = "dstok_" - - # Check if tokens are enabled - if not datasette.setting("allow_signed_tokens"): - return None - - max_signed_tokens_ttl = datasette.setting("max_signed_tokens_ttl") - - # Get authorization header - authorization = request.headers.get("authorization") - if not authorization: - return None - if not authorization.startswith("Bearer "): - return None - - token = authorization[len("Bearer ") :] - if not token.startswith(prefix): - return None - - # Remove prefix and verify signature - token = token[len(prefix) :] - try: - decoded = datasette.unsign(token, namespace="token") - except itsdangerous.BadSignature: - return None - - # Validate timestamp - if "t" not in decoded: - return None - created = decoded["t"] - if not isinstance(created, int): - return None - - # Handle duration/expiry - duration = decoded.get("d") - if duration is not None and not isinstance(duration, int): - return None - - # Apply max TTL if configured - if (duration is None and max_signed_tokens_ttl) or ( - duration is not None - and max_signed_tokens_ttl - and duration > max_signed_tokens_ttl - ): - duration = max_signed_tokens_ttl - - # Check expiry - if duration: - if time.time() - created > duration: - return None - - # Build actor dict - actor = {"id": decoded["a"], "token": "dstok"} - - # Copy restrictions if present - if "_r" in decoded: - actor["_r"] = decoded["_r"] - - # Add expiry timestamp if applicable - if duration: - actor["token_expires"] = created + duration - - return actor diff --git a/datasette/events.py b/datasette/events.py deleted file mode 100644 index 5cd5ba3d..00000000 --- a/datasette/events.py +++ /dev/null @@ -1,235 +0,0 @@ -from abc import ABC, abstractproperty -from dataclasses import asdict, dataclass, field -from datasette.hookspecs import hookimpl -from datetime import datetime, timezone - - -@dataclass -class Event(ABC): - @abstractproperty - def name(self): - pass - - created: datetime = field( - init=False, default_factory=lambda: datetime.now(timezone.utc) - ) - actor: dict | None - - def properties(self): - properties = asdict(self) - properties.pop("actor", None) - properties.pop("created", None) - return properties - - -@dataclass -class LoginEvent(Event): - """ - Event name: ``login`` - - A user (represented by ``event.actor``) has logged in. - """ - - name = "login" - - -@dataclass -class LogoutEvent(Event): - """ - Event name: ``logout`` - - A user (represented by ``event.actor``) has logged out. - """ - - name = "logout" - - -@dataclass -class CreateTokenEvent(Event): - """ - Event name: ``create-token`` - - A user created an API token. - - :ivar expires_after: Number of seconds after which this token will expire. - :type expires_after: int or None - :ivar restrict_all: Restricted permissions for this token. - :type restrict_all: list - :ivar restrict_database: Restricted database permissions for this token. - :type restrict_database: dict - :ivar restrict_resource: Restricted resource permissions for this token. - :type restrict_resource: dict - """ - - name = "create-token" - expires_after: int | None - restrict_all: list - restrict_database: dict - restrict_resource: dict - - -@dataclass -class CreateTableEvent(Event): - """ - Event name: ``create-table`` - - A new table has been created in the database. 
- - :ivar database: The name of the database where the table was created. - :type database: str - :ivar table: The name of the table that was created - :type table: str - :ivar schema: The SQL schema definition for the new table. - :type schema: str - """ - - name = "create-table" - database: str - table: str - schema: str - - -@dataclass -class DropTableEvent(Event): - """ - Event name: ``drop-table`` - - A table has been dropped from the database. - - :ivar database: The name of the database where the table was dropped. - :type database: str - :ivar table: The name of the table that was dropped - :type table: str - """ - - name = "drop-table" - database: str - table: str - - -@dataclass -class AlterTableEvent(Event): - """ - Event name: ``alter-table`` - - A table has been altered. - - :ivar database: The name of the database where the table was altered - :type database: str - :ivar table: The name of the table that was altered - :type table: str - :ivar before_schema: The table's SQL schema before the alteration - :type before_schema: str - :ivar after_schema: The table's SQL schema after the alteration - :type after_schema: str - """ - - name = "alter-table" - database: str - table: str - before_schema: str - after_schema: str - - -@dataclass -class InsertRowsEvent(Event): - """ - Event name: ``insert-rows`` - - Rows were inserted into a table. - - :ivar database: The name of the database where the rows were inserted. - :type database: str - :ivar table: The name of the table where the rows were inserted. - :type table: str - :ivar num_rows: The number of rows that were requested to be inserted. - :type num_rows: int - :ivar ignore: Was ignore set? - :type ignore: bool - :ivar replace: Was replace set? - :type replace: bool - """ - - name = "insert-rows" - database: str - table: str - num_rows: int - ignore: bool - replace: bool - - -@dataclass -class UpsertRowsEvent(Event): - """ - Event name: ``upsert-rows`` - - Rows were upserted into a table. - - :ivar database: The name of the database where the rows were inserted. - :type database: str - :ivar table: The name of the table where the rows were inserted. - :type table: str - :ivar num_rows: The number of rows that were requested to be inserted. - :type num_rows: int - """ - - name = "upsert-rows" - database: str - table: str - num_rows: int - - -@dataclass -class UpdateRowEvent(Event): - """ - Event name: ``update-row`` - - A row was updated in a table. - - :ivar database: The name of the database where the row was updated. - :type database: str - :ivar table: The name of the table where the row was updated. - :type table: str - :ivar pks: The primary key values of the updated row. - """ - - name = "update-row" - database: str - table: str - pks: list - - -@dataclass -class DeleteRowEvent(Event): - """ - Event name: ``delete-row`` - - A row was deleted from a table. - - :ivar database: The name of the database where the row was deleted. - :type database: str - :ivar table: The name of the table where the row was deleted. - :type table: str - :ivar pks: The primary key values of the deleted row. 
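
For readers following the event classes being removed here, a minimal sketch of how one of them behaves (illustrative only; the import exists only on the side of this patch that still ships datasette/events.py, and the database, table and actor values are made up):

from datasette.events import InsertRowsEvent

event = InsertRowsEvent(
    actor={"id": "root"},
    database="fixtures",
    table="facetable",
    num_rows=3,
    ignore=False,
    replace=False,
)
# properties() drops the shared "actor" and "created" fields, leaving only the
# event-specific payload:
# {'database': 'fixtures', 'table': 'facetable', 'num_rows': 3,
#  'ignore': False, 'replace': False}
print(event.properties())
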
- """ - - name = "delete-row" - database: str - table: str - pks: list - - -@hookimpl -def register_events(): - return [ - LoginEvent, - LogoutEvent, - CreateTableEvent, - CreateTokenEvent, - AlterTableEvent, - DropTableEvent, - InsertRowsEvent, - UpsertRowsEvent, - UpdateRowEvent, - DeleteRowEvent, - ] diff --git a/datasette/facets.py b/datasette/facets.py index dd149424..7fb0c68b 100644 --- a/datasette/facets.py +++ b/datasette/facets.py @@ -11,8 +11,8 @@ from datasette.utils import ( ) -def load_facet_configs(request, table_config): - # Given a request and the configuration for a table, return +def load_facet_configs(request, table_metadata): + # Given a request and the metadata configuration for a table, return # a dictionary of selected facets, their lists of configs and for each # config whether it came from the request or the metadata. # @@ -20,21 +20,21 @@ def load_facet_configs(request, table_config): # {"source": "metadata", "config": config1}, # {"source": "request", "config": config2}]} facet_configs = {} - table_config = table_config or {} - table_facet_configs = table_config.get("facets", []) - for facet_config in table_facet_configs: - if isinstance(facet_config, str): + table_metadata = table_metadata or {} + metadata_facets = table_metadata.get("facets", []) + for metadata_config in metadata_facets: + if isinstance(metadata_config, str): type = "column" - facet_config = {"simple": facet_config} + metadata_config = {"simple": metadata_config} else: assert ( - len(facet_config.values()) == 1 + len(metadata_config.values()) == 1 ), "Metadata config dicts should be {type: config}" - type, facet_config = list(facet_config.items())[0] - if isinstance(facet_config, str): - facet_config = {"simple": facet_config} + type, metadata_config = list(metadata_config.items())[0] + if isinstance(metadata_config, str): + metadata_config = {"simple": metadata_config} facet_configs.setdefault(type, []).append( - {"source": "metadata", "config": facet_config} + {"source": "metadata", "config": metadata_config} ) qs_pairs = urllib.parse.parse_qs(request.query_string, keep_blank_values=True) for key, values in qs_pairs.items(): @@ -45,12 +45,13 @@ def load_facet_configs(request, table_config): elif key.startswith("_facet_"): type = key[len("_facet_") :] for value in values: - # The value is the facet_config - either JSON or not - facet_config = ( - json.loads(value) if value.startswith("{") else {"simple": value} - ) + # The value is the config - either JSON or not + if value.startswith("{"): + config = json.loads(value) + else: + config = {"simple": value} facet_configs.setdefault(type, []).append( - {"source": "request", "config": facet_config} + {"source": "request", "config": config} ) return facet_configs @@ -65,8 +66,6 @@ def register_facet_classes(): class Facet: type = None - # How many rows to consider when suggesting facets: - suggest_consider = 1000 def __init__( self, @@ -76,7 +75,7 @@ class Facet: sql=None, table=None, params=None, - table_config=None, + metadata=None, row_count=None, ): assert table or sql, "Must provide either table= or sql=" @@ -87,12 +86,12 @@ class Facet: self.table = table self.sql = sql or f"select * from [{table}]" self.params = params or [] - self.table_config = table_config + self.metadata = metadata # row_count can be None, in which case we calculate it ourselves: self.row_count = row_count def get_configs(self): - configs = load_facet_configs(self.request, self.table_config) + configs = load_facet_configs(self.request, self.metadata) return 
configs.get(self.type) or [] def get_querystring_pairs(self): @@ -105,15 +104,10 @@ class Facet: max_returned_rows = self.ds.setting("max_returned_rows") table_facet_size = None if self.table: - config_facet_size = ( - self.ds.config.get("databases", {}) - .get(self.database, {}) - .get("tables", {}) - .get(self.table, {}) - .get("facet_size") - ) - if config_facet_size: - table_facet_size = config_facet_size + tables_metadata = self.ds.metadata("tables", database=self.database) or {} + table_metadata = tables_metadata.get(self.table) or {} + if table_metadata: + table_facet_size = table_metadata.get("facet_size") custom_facet_size = self.request.args.get("_facet_size") if custom_facet_size: if custom_facet_size == "max": @@ -147,6 +141,17 @@ class Facet: ) ).columns + async def get_row_count(self): + if self.row_count is None: + self.row_count = ( + await self.ds.execute( + self.database, + f"select count(*) from ({self.sql})", + self.params, + ) + ).rows[0][0] + return self.row_count + class ColumnFacet(Facet): type = "column" @@ -161,16 +166,13 @@ class ColumnFacet(Facet): if column in already_enabled: continue suggested_facet_sql = """ - with limited as (select * from ({sql}) limit {suggest_consider}) - select {column} as value, count(*) as n from limited - where value is not null + select {column} as value, count(*) as n from ( + {sql} + ) where value is not null group by value limit {limit} """.format( - column=escape_sqlite(column), - sql=self.sql, - limit=facet_size + 1, - suggest_consider=self.suggest_consider, + column=escape_sqlite(column), sql=self.sql, limit=facet_size + 1 ) distinct_values = None try: @@ -205,17 +207,6 @@ class ColumnFacet(Facet): continue return suggested_facets - async def get_row_count(self): - if self.row_count is None: - self.row_count = ( - await self.ds.execute( - self.database, - f"select count(*) from (select * from ({self.sql}) limit {self.suggest_consider})", - self.params, - ) - ).rows[0][0] - return self.row_count - async def facet_results(self): facet_results = [] facets_timed_out = [] @@ -262,7 +253,7 @@ class ColumnFacet(Facet): # Attempt to expand foreign keys into labels values = [row["value"] for row in facet_rows] expanded = await self.ds.expand_foreign_keys( - self.request.actor, self.database, self.table, column, values + self.database, self.table, column, values ) else: expanded = {} @@ -318,14 +309,11 @@ class ArrayFacet(Facet): continue # Is every value in this column either null or a JSON array? suggested_facet_sql = """ - with limited as (select * from ({sql}) limit {suggest_consider}) select distinct json_type({column}) - from limited + from ({sql}) where {column} is not null and {column} != '' """.format( - column=escape_sqlite(column), - sql=self.sql, - suggest_consider=self.suggest_consider, + column=escape_sqlite(column), sql=self.sql ) try: results = await self.ds.execute( @@ -410,9 +398,7 @@ class ArrayFacet(Facet): order by count(*) desc, value limit {limit} """.format( - col=escape_sqlite(column), - sql=self.sql, - limit=facet_size + 1, + col=escape_sqlite(column), sql=self.sql, limit=facet_size + 1 ) try: facet_rows_results = await self.ds.execute( @@ -480,8 +466,8 @@ class DateFacet(Facet): # Does this column contain any dates in the first 100 rows? 
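
To make the configuration shapes parsed by load_facet_configs above concrete, a sketch (illustrative table and column names only) of the two sources it reads from, written as the Python equivalent of a table metadata block plus the matching per-request query string:

table_metadata = {
    "facet_size": 10,
    "facets": [
        "state",            # plain string: a column facet
        {"array": "tags"},  # {type: config} dict: an array facet
    ],
}
# The same facets requested per-request instead of via metadata:
#   ?_facet=state&_facet_array=tags&_facet_size=10
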
suggested_facet_sql = """ select date({column}) from ( - select * from ({sql}) limit 100 - ) where {column} glob "????-??-*" + {sql} + ) where {column} glob "????-??-*" limit 100; """.format( column=escape_sqlite(column), sql=self.sql ) diff --git a/datasette/filters.py b/datasette/filters.py index 95cc5f37..5ea3488b 100644 --- a/datasette/filters.py +++ b/datasette/filters.py @@ -1,8 +1,8 @@ from datasette import hookimpl -from datasette.resources import DatabaseResource from datasette.views.base import DatasetteError from datasette.utils.asgi import BadRequest import json +import numbers from .utils import detect_json1, escape_sqlite, path_with_removed_args @@ -13,10 +13,11 @@ def where_filters(request, database, datasette): where_clauses = [] extra_wheres_for_ui = [] if "_where" in request.args: - if not await datasette.allowed( - action="execute-sql", - resource=DatabaseResource(database=database), - actor=request.actor, + if not await datasette.permission_allowed( + request.actor, + "execute-sql", + resource=database, + default=True, ): raise DatasetteError("_where= is not allowed", status=403) else: @@ -49,7 +50,7 @@ def search_filters(request, database, table, datasette): extra_context = {} # Figure out which fts_table to use - table_metadata = await datasette.table_config(database, table) + table_metadata = datasette.table_metadata(database, table) db = datasette.get_database(database) fts_table = request.args.get("_fts_table") fts_table = fts_table or table_metadata.get("fts_table") @@ -79,9 +80,9 @@ def search_filters(request, database, table, datasette): "{fts_pk} in (select rowid from {fts_table} where {fts_table} match {match_clause})".format( fts_table=escape_sqlite(fts_table), fts_pk=escape_sqlite(fts_pk), - match_clause=( - ":search" if search_mode_raw else "escape_fts(:search)" - ), + match_clause=":search" + if search_mode_raw + else "escape_fts(:search)", ) ) human_descriptions.append(f'search matches "{search}"') @@ -98,11 +99,9 @@ def search_filters(request, database, table, datasette): "rowid in (select rowid from {fts_table} where {search_col} match {match_clause})".format( fts_table=escape_sqlite(fts_table), search_col=escape_sqlite(search_col), - match_clause=( - ":search_{}".format(i) - if search_mode_raw - else "escape_fts(:search_{})".format(i) - ), + match_clause=":search_{}".format(i) + if search_mode_raw + else "escape_fts(:search_{})".format(i), ) ) human_descriptions.append( @@ -280,13 +279,6 @@ class Filters: '{c} contains "{v}"', format="%{}%", ), - TemplatedFilter( - "notcontains", - "does not contain", - '"{c}" not like :{p}', - '{c} does not contain "{v}"', - format="%{}%", - ), TemplatedFilter( "endswith", "ends with", @@ -367,8 +359,12 @@ class Filters: ) _filters_by_key = {f.key: f for f in _filters} - def __init__(self, pairs): + def __init__(self, pairs, units=None, ureg=None): + if units is None: + units = {} self.pairs = pairs + self.units = units + self.ureg = ureg def lookups(self): """Yields (lookup, display, no_argument) pairs""" @@ -408,6 +404,20 @@ class Filters: def has_selections(self): return bool(self.pairs) + def convert_unit(self, column, value): + """If the user has provided a unit in the query, convert it into the column unit, if present.""" + if column not in self.units: + return value + + # Try to interpret the value as a unit + value = self.ureg(value) + if isinstance(value, numbers.Number): + # It's just a bare number, assume it's the column unit + return value + + column_unit = self.ureg(self.units[column]) + return 
value.to(column_unit).magnitude + def build_where_clauses(self, table): sql_bits = [] params = {} @@ -415,7 +425,9 @@ class Filters: for column, lookup, value in self.selections(): filter = self._filters_by_key.get(lookup, None) if filter: - sql_bit, param = filter.where_clause(table, column, value, i) + sql_bit, param = filter.where_clause( + table, column, self.convert_unit(column, value), i + ) sql_bits.append(sql_bit) if param is not None: if not isinstance(param, list): diff --git a/datasette/forbidden.py b/datasette/forbidden.py index 41c48396..156a44d4 100644 --- a/datasette/forbidden.py +++ b/datasette/forbidden.py @@ -1,3 +1,4 @@ +from os import stat from datasette import hookimpl, Response diff --git a/datasette/handle_exception.py b/datasette/handle_exception.py index 96398a4c..8b7e83e3 100644 --- a/datasette/handle_exception.py +++ b/datasette/handle_exception.py @@ -1,16 +1,14 @@ from datasette import hookimpl, Response -from .utils import add_cors_headers +from .utils import await_me_maybe, add_cors_headers from .utils.asgi import ( Base400, + Forbidden, ) from .views.base import DatasetteError from markupsafe import Markup +import pdb import traceback - -try: - import ipdb as pdb -except ImportError: - import pdb +from .plugins import pm try: import rich @@ -59,8 +57,7 @@ def handle_exception(datasette, request, exception): if request.path.split("?")[0].endswith(".json"): return Response.json(info, status=status, headers=headers) else: - environment = datasette.get_jinja_environment(request) - template = environment.select_template(templates) + template = datasette.jinja_env.select_template(templates) return Response.html( await template.render_async( dict( diff --git a/datasette/hookspecs.py b/datasette/hookspecs.py index 3f6a1425..34e19664 100644 --- a/datasette/hookspecs.py +++ b/datasette/hookspecs.py @@ -10,6 +10,11 @@ def startup(datasette): """Fires directly after Datasette first starts running""" +@hookspec +def get_metadata(datasette, key, database, table): + """Return metadata to be merged into Datasette's metadata dictionary""" + + @hookspec def asgi_wrapper(datasette): """Returns an ASGI middleware callable to wrap our ASGI application with""" @@ -55,7 +60,7 @@ def publish_subcommand(publish): @hookspec -def render_cell(row, value, column, table, database, datasette, request): +def render_cell(row, value, column, table, database, datasette): """Customize rendering of HTML table cell values""" @@ -69,11 +74,6 @@ def register_facet_classes(): """Register Facet subclasses""" -@hookspec -def register_actions(datasette): - """Register actions: returns a list of datasette.permission.Action objects""" - - @hookspec def register_routes(datasette): """Register URL routes: return a list of (regex, view_function) pairs""" @@ -89,16 +89,6 @@ def actor_from_request(datasette, request): """Return an actor dictionary based on the incoming request""" -@hookspec(firstresult=True) -def actors_from_ids(datasette, actor_ids): - """Returns a dictionary mapping those IDs to actor dictionaries""" - - -@hookspec -def jinja2_environment_from_request(datasette, request, env): - """Return a Jinja2 environment based on the incoming request""" - - @hookspec def filters_from_request(request, database, table, datasette): """ @@ -111,15 +101,8 @@ def filters_from_request(request, database, table, datasette): @hookspec -def permission_resources_sql(datasette, actor, action): - """Return SQL query fragments for permission checks on resources. 
- - Returns None, a PermissionSQL object, or a list of PermissionSQL objects. - Each PermissionSQL contains SQL that should return rows with columns: - parent (str|None), child (str|None), allow (int), reason (str). - - Used to efficiently check permissions across multiple resources at once. - """ +def permission_allowed(datasette, actor, action, resource): + """Check if actor is allowed to perform this action - return True, False or None""" @hookspec @@ -142,36 +125,16 @@ def menu_links(datasette, actor, request): """Links for the navigation menu""" -@hookspec -def row_actions(datasette, actor, request, database, table, row): - """Links for the row actions menu""" - - @hookspec def table_actions(datasette, actor, database, table, request): """Links for the table actions menu""" -@hookspec -def view_actions(datasette, actor, database, view, request): - """Links for the view actions menu""" - - -@hookspec -def query_actions(datasette, actor, database, query_name, request, sql, params): - """Links for the query and canned query actions menu""" - - @hookspec def database_actions(datasette, actor, database, request): """Links for the database actions menu""" -@hookspec -def homepage_actions(datasette, actor, request): - """Links for the homepage actions menu""" - - @hookspec def skip_csrf(datasette, scope): """Mechanism for skipping CSRF checks for certain requests""" @@ -180,43 +143,3 @@ def skip_csrf(datasette, scope): @hookspec def handle_exception(datasette, request, exception): """Handle an uncaught exception. Can return a Response or None.""" - - -@hookspec -def track_event(datasette, event): - """Respond to an event tracked by Datasette""" - - -@hookspec -def register_events(datasette): - """Return a list of Event subclasses to use with track_event()""" - - -@hookspec -def top_homepage(datasette, request): - """HTML to include at the top of the homepage""" - - -@hookspec -def top_database(datasette, request, database): - """HTML to include at the top of the database page""" - - -@hookspec -def top_table(datasette, request, database, table): - """HTML to include at the top of the table page""" - - -@hookspec -def top_row(datasette, request, database, table, row): - """HTML to include at the top of the row page""" - - -@hookspec -def top_query(datasette, request, database, sql): - """HTML to include at the top of the query results page""" - - -@hookspec -def top_canned_query(datasette, request, database, query_name): - """HTML to include at the top of the canned query page""" diff --git a/datasette/permissions.py b/datasette/permissions.py index c48293ac..91c9e774 100644 --- a/datasette/permissions.py +++ b/datasette/permissions.py @@ -1,210 +1,19 @@ -from abc import ABC, abstractmethod -from dataclasses import dataclass -from typing import Any, NamedTuple -import contextvars +import collections - -# Context variable to track when permission checks should be skipped -_skip_permission_checks = contextvars.ContextVar( - "skip_permission_checks", default=False +Permission = collections.namedtuple( + "Permission", ("name", "abbr", "takes_database", "takes_table", "default") ) - -class SkipPermissions: - """Context manager to temporarily skip permission checks. - - This is not a stable API and may change in future releases. 
-
-    Usage:
-        with SkipPermissions():
-            # Permission checks are skipped within this block
-            response = await datasette.client.get("/protected")
-    """
-
-    def __enter__(self):
-        self.token = _skip_permission_checks.set(True)
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        _skip_permission_checks.reset(self.token)
-        return False
-
-
-class Resource(ABC):
-    """
-    Base class for all resource types.
-
-    Each subclass represents a type of resource (e.g., TableResource, DatabaseResource).
-    The class itself carries metadata about the resource type.
-    Instances represent specific resources.
-    """
-
-    # Class-level metadata (subclasses must define these)
-    name: str = None  # e.g., "table", "database", "model"
-    parent_class: type["Resource"] | None = None  # e.g., DatabaseResource for tables
-
-    # Instance-level optional extra attributes
-    reasons: list[str] | None = None
-    include_reasons: bool | None = None
-
-    def __init__(self, parent: str | None = None, child: str | None = None):
-        """
-        Create a resource instance.
-
-        Args:
-            parent: The parent identifier (meaning depends on resource type)
-            child: The child identifier (meaning depends on resource type)
-        """
-        self.parent = parent
-        self.child = child
-        self._private = None  # Sentinel to track if private was set
-
-    @property
-    def private(self) -> bool:
-        """
-        Whether this resource is private (accessible to actor but not anonymous).
-
-        This property is only available on Resource objects returned from
-        allowed_resources() when include_is_private=True is used.
-
-        Raises:
-            AttributeError: If accessed without calling include_is_private=True
-        """
-        if self._private is None:
-            raise AttributeError(
-                "The 'private' attribute is only available when using "
-                "allowed_resources(..., include_is_private=True)"
-            )
-        return self._private
-
-    @private.setter
-    def private(self, value: bool):
-        self._private = value
-
-    @classmethod
-    def __init_subclass__(cls):
-        """
-        Validate resource hierarchy doesn't exceed 2 levels.
-
-        Raises:
-            ValueError: If this resource would create a 3-level hierarchy
-        """
-        super().__init_subclass__()
-
-        if cls.parent_class is None:
-            return  # Top of hierarchy, nothing to validate
-
-        # Check if our parent has a parent - that would create 3 levels
-        if cls.parent_class.parent_class is not None:
-            # We have a parent, and that parent has a parent
-            # This creates a 3-level hierarchy, which is not allowed
-            raise ValueError(
-                f"Resource {cls.__name__} creates a 3-level hierarchy: "
-                f"{cls.parent_class.parent_class.__name__} -> {cls.parent_class.__name__} -> {cls.__name__}. "
-                f"Maximum 2 levels allowed (parent -> child)."
-            )
-
-    @classmethod
-    @abstractmethod
-    def resources_sql(cls) -> str:
-        """
-        Return SQL query that returns all resources of this type.
-
-        Must return two columns: parent, child
-        """
-        pass
-
-
-class AllowedResource(NamedTuple):
-    """A resource with the reason it was allowed (for debugging)."""
-
-    resource: Resource
-    reason: str
-
-
-@dataclass(frozen=True, kw_only=True)
-class Action:
-    name: str
-    description: str | None
-    abbr: str | None = None
-    resource_class: type[Resource] | None = None
-    also_requires: str | None = None  # Optional action name that must also be allowed
-
-    @property
-    def takes_parent(self) -> bool:
-        """
-        Whether this action requires a parent identifier when instantiating its resource.
-
-        Returns False for global-only actions (no resource_class).
-        Returns True for all actions with a resource_class (all resources require a parent identifier).
-        """
-        return self.resource_class is not None
-
-    @property
-    def takes_child(self) -> bool:
-        """
-        Whether this action requires a child identifier when instantiating its resource.
-
-        Returns False for global actions (no resource_class).
-        Returns False for parent-level resources (DatabaseResource - parent_class is None).
-        Returns True for child-level resources (TableResource, QueryResource - have a parent_class).
-        """
-        if self.resource_class is None:
-            return False
-        return self.resource_class.parent_class is not None
-
-
-_reason_id = 1
-
-
-@dataclass
-class PermissionSQL:
-    """
-    A plugin contributes SQL that yields:
-        parent TEXT NULL,
-        child TEXT NULL,
-        allow INTEGER,  -- 1 allow, 0 deny
-        reason TEXT
-
-    For restriction-only plugins, sql can be None and only restriction_sql is provided.
-    """
-
-    sql: str | None = (
-        None  # SQL that SELECTs the 4 columns above (can be None for restriction-only)
-    )
-    params: dict[str, Any] | None = (
-        None  # bound params for the SQL (values only; no ':' prefix)
-    )
-    source: str | None = None  # System will set this to the plugin name
-    restriction_sql: str | None = (
-        None  # Optional SQL that returns (parent, child) for restriction filtering
-    )
-
-    @classmethod
-    def allow(cls, reason: str, _allow: bool = True) -> "PermissionSQL":
-        global _reason_id
-        i = _reason_id
-        _reason_id += 1
-        return cls(
-            sql=f"SELECT NULL AS parent, NULL AS child, {1 if _allow else 0} AS allow, :reason_{i} AS reason",
-            params={f"reason_{i}": reason},
-        )
-
-    @classmethod
-    def deny(cls, reason: str) -> "PermissionSQL":
-        return cls.allow(reason=reason, _allow=False)
-
-
-# This is obsolete, replaced by Action and ResourceType
-@dataclass
-class Permission:
-    name: str
-    abbr: str | None
-    description: str | None
-    takes_database: bool
-    takes_resource: bool
-    default: bool
-    # This is deliberately undocumented: it's considered an internal
-    # implementation detail for view-table/view-database and should
-    # not be used by plugins as it may change in the future.
-    implies_can_view: bool = False
+PERMISSIONS = (
+    Permission("view-instance", "vi", False, False, True),
+    Permission("view-database", "vd", True, False, True),
+    Permission("view-database-download", "vdd", True, False, True),
+    Permission("view-table", "vt", True, True, True),
+    Permission("view-query", "vq", True, True, True),
+    Permission("insert-row", "ir", True, True, False),
+    Permission("delete-row", "dr", True, True, False),
+    Permission("drop-table", "dt", True, True, False),
+    Permission("execute-sql", "es", True, False, True),
+    Permission("permissions-debug", "pd", False, False, False),
+    Permission("debug-menu", "dm", False, False, False),
+)
diff --git a/datasette/plugins.py b/datasette/plugins.py
index e9818885..fef0c8e9 100644
--- a/datasette/plugins.py
+++ b/datasette/plugins.py
@@ -1,20 +1,9 @@
 import importlib
-import os
 import pluggy
-from pprint import pprint
+import pkg_resources
 import sys
 from .
import hookspecs -if sys.version_info >= (3, 9): - import importlib.resources as importlib_resources -else: - import importlib_resources -if sys.version_info >= (3, 10): - import importlib.metadata as importlib_metadata -else: - import importlib_metadata - - DEFAULT_PLUGINS = ( "datasette.publish.heroku", "datasette.publish.cloudrun", @@ -23,65 +12,20 @@ DEFAULT_PLUGINS = ( "datasette.sql_functions", "datasette.actor_auth_cookie", "datasette.default_permissions", - "datasette.default_actions", "datasette.default_magic_parameters", "datasette.blob_renderer", "datasette.default_menu_links", "datasette.handle_exception", "datasette.forbidden", - "datasette.events", ) pm = pluggy.PluginManager("datasette") pm.add_hookspecs(hookspecs) -DATASETTE_TRACE_PLUGINS = os.environ.get("DATASETTE_TRACE_PLUGINS", None) - - -def before(hook_name, hook_impls, kwargs): - print(file=sys.stderr) - print(f"{hook_name}:", file=sys.stderr) - pprint(kwargs, width=40, indent=4, stream=sys.stderr) - print("Hook implementations:", file=sys.stderr) - pprint(hook_impls, width=40, indent=4, stream=sys.stderr) - - -def after(outcome, hook_name, hook_impls, kwargs): - results = outcome.get_result() - if not isinstance(results, list): - results = [results] - print("Results:", file=sys.stderr) - pprint(results, width=40, indent=4, stream=sys.stderr) - - -if DATASETTE_TRACE_PLUGINS: - pm.add_hookcall_monitoring(before, after) - - -DATASETTE_LOAD_PLUGINS = os.environ.get("DATASETTE_LOAD_PLUGINS", None) - -if not hasattr(sys, "_called_from_test") and DATASETTE_LOAD_PLUGINS is None: +if not hasattr(sys, "_called_from_test"): # Only load plugins if not running tests pm.load_setuptools_entrypoints("datasette") -# Load any plugins specified in DATASETTE_LOAD_PLUGINS") -if DATASETTE_LOAD_PLUGINS is not None: - for package_name in [ - name for name in DATASETTE_LOAD_PLUGINS.split(",") if name.strip() - ]: - try: - distribution = importlib_metadata.distribution(package_name) - entry_points = distribution.entry_points - for entry_point in entry_points: - if entry_point.group == "datasette": - mod = entry_point.load() - pm.register(mod, name=entry_point.name) - # Ensure name can be found in plugin_to_distinfo later: - pm._plugin_distinfo.append((mod, distribution)) - except importlib_metadata.PackageNotFoundError: - sys.stderr.write("Plugin {} could not be found\n".format(package_name)) - - # Load default plugins for plugin in DEFAULT_PLUGINS: mod = importlib.import_module(plugin) @@ -94,24 +38,21 @@ def get_plugins(): for plugin in pm.get_plugins(): static_path = None templates_path = None - plugin_name = ( - plugin.__name__ - if hasattr(plugin, "__name__") - else plugin.__class__.__name__ - ) - if plugin_name not in DEFAULT_PLUGINS: + if plugin.__name__ not in DEFAULT_PLUGINS: try: - if (importlib_resources.files(plugin_name) / "static").is_dir(): - static_path = str(importlib_resources.files(plugin_name) / "static") - if (importlib_resources.files(plugin_name) / "templates").is_dir(): - templates_path = str( - importlib_resources.files(plugin_name) / "templates" + if pkg_resources.resource_isdir(plugin.__name__, "static"): + static_path = pkg_resources.resource_filename( + plugin.__name__, "static" ) - except (TypeError, ModuleNotFoundError): - # Caused by --plugins_dir= plugins + if pkg_resources.resource_isdir(plugin.__name__, "templates"): + templates_path = pkg_resources.resource_filename( + plugin.__name__, "templates" + ) + except (KeyError, ImportError): + # Caused by --plugins_dir= plugins - KeyError/ImportError thrown in 
Py3.5 pass plugin_info = { - "name": plugin_name, + "name": plugin.__name__, "static_path": static_path, "templates_path": templates_path, "hooks": [h.name for h in pm.get_hookcallers(plugin)], @@ -119,6 +60,6 @@ def get_plugins(): distinfo = plugin_to_distinfo.get(plugin) if distinfo: plugin_info["version"] = distinfo.version - plugin_info["name"] = distinfo.name or distinfo.project_name + plugin_info["name"] = distinfo.project_name plugins.append(plugin_info) return plugins diff --git a/datasette/publish/cloudrun.py b/datasette/publish/cloudrun.py index 63d22fe8..77274eb0 100644 --- a/datasette/publish/cloudrun.py +++ b/datasette/publish/cloudrun.py @@ -3,7 +3,7 @@ import click import json import os import re -from subprocess import CalledProcessError, check_call, check_output +from subprocess import check_call, check_output from .common import ( add_common_publish_arguments_and_options, @@ -23,9 +23,7 @@ def publish_subcommand(publish): help="Application name to use when building", ) @click.option( - "--service", - default="", - help="Cloud Run service to deploy (or over-write)", + "--service", default="", help="Cloud Run service to deploy (or over-write)" ) @click.option("--spatialite", is_flag=True, help="Enable SpatialLite extension") @click.option( @@ -57,32 +55,13 @@ def publish_subcommand(publish): @click.option( "--max-instances", type=int, - default=1, - show_default=True, - help="Maximum Cloud Run instances (use 0 to remove the limit)", + help="Maximum Cloud Run instances", ) @click.option( "--min-instances", type=int, help="Minimum Cloud Run instances", ) - @click.option( - "--artifact-repository", - default="datasette", - show_default=True, - help="Artifact Registry repository to store the image", - ) - @click.option( - "--artifact-region", - default="us", - show_default=True, - help="Artifact Registry location (region or multi-region)", - ) - @click.option( - "--artifact-project", - default=None, - help="Project ID for Artifact Registry (defaults to the active project)", - ) def cloudrun( files, metadata, @@ -112,9 +91,6 @@ def publish_subcommand(publish): apt_get_extras, max_instances, min_instances, - artifact_repository, - artifact_region, - artifact_project, ): "Publish databases to Datasette running on Cloud Run" fail_if_publish_binary_not_installed( @@ -124,21 +100,6 @@ def publish_subcommand(publish): "gcloud config get-value project", shell=True, universal_newlines=True ).strip() - artifact_project = artifact_project or project - - # Ensure Artifact Registry exists for the target image - _ensure_artifact_registry( - artifact_project=artifact_project, - artifact_region=artifact_region, - artifact_repository=artifact_repository, - ) - - artifact_host = ( - artifact_region - if artifact_region.endswith("-docker.pkg.dev") - else f"{artifact_region}-docker.pkg.dev" - ) - if not service: # Show the user their current services, then prompt for one click.echo("Please provide a service name for this deployment\n") @@ -156,11 +117,6 @@ def publish_subcommand(publish): click.echo("") service = click.prompt("Service name", type=str) - image_id = ( - f"{artifact_host}/{artifact_project}/" - f"{artifact_repository}/datasette-{service}" - ) - extra_metadata = { "title": title, "license": license, @@ -217,6 +173,7 @@ def publish_subcommand(publish): print(fp.read()) print("\n====================\n") + image_id = f"gcr.io/{project}/{name}" check_call( "gcloud builds submit --tag {}{}".format( image_id, " --timeout {}".format(timeout) if timeout else "" @@ -230,7 +187,7 @@ def 
publish_subcommand(publish): ("--max-instances", max_instances), ("--min-instances", min_instances), ): - if value is not None: + if value: extra_deploy_options.append("{} {}".format(option, value)) check_call( "gcloud run deploy --allow-unauthenticated --platform=managed --image {} {}{}".format( @@ -242,52 +199,6 @@ def publish_subcommand(publish): ) -def _ensure_artifact_registry(artifact_project, artifact_region, artifact_repository): - """Ensure Artifact Registry API is enabled and the repository exists.""" - - enable_cmd = ( - "gcloud services enable artifactregistry.googleapis.com " - f"--project {artifact_project} --quiet" - ) - try: - check_call(enable_cmd, shell=True) - except CalledProcessError as exc: - raise click.ClickException( - "Failed to enable artifactregistry.googleapis.com. " - "Please ensure you have permissions to manage services." - ) from exc - - describe_cmd = ( - "gcloud artifacts repositories describe {repo} --project {project} " - "--location {location} --quiet" - ).format( - repo=artifact_repository, - project=artifact_project, - location=artifact_region, - ) - try: - check_call(describe_cmd, shell=True) - return - except CalledProcessError: - create_cmd = ( - "gcloud artifacts repositories create {repo} --repository-format=docker " - '--location {location} --project {project} --description "Datasette Cloud Run images" --quiet' - ).format( - repo=artifact_repository, - location=artifact_region, - project=artifact_project, - ) - try: - check_call(create_cmd, shell=True) - click.echo(f"Created Artifact Registry repository '{artifact_repository}'") - except CalledProcessError as exc: - raise click.ClickException( - "Failed to create Artifact Registry repository. " - "Use --artifact-repository/--artifact-region to point to an existing repo " - "or create one manually." 
- ) from exc - - def get_existing_services(): services = json.loads( check_output( @@ -303,7 +214,6 @@ def get_existing_services(): "url": service["status"]["address"]["url"], } for service in services - if "url" in service["status"] ] diff --git a/datasette/renderer.py b/datasette/renderer.py index acf23e59..45089498 100644 --- a/datasette/renderer.py +++ b/datasette/renderer.py @@ -4,7 +4,6 @@ from datasette.utils import ( remove_infinites, CustomJSONEncoder, path_from_row_pks, - sqlite3, ) from datasette.utils.asgi import Response @@ -20,14 +19,14 @@ def convert_specific_columns_to_json(rows, columns, json_cols): if column in json_cols: try: value = json.loads(value) - except (TypeError, ValueError): + except (TypeError, ValueError) as e: pass new_row.append(value) new_rows.append(new_row) return new_rows -def json_renderer(request, args, data, error, truncated=None): +def json_renderer(args, data, view_name): """Render a response as JSON""" status_code = 200 @@ -45,38 +44,28 @@ def json_renderer(request, args, data, error, truncated=None): data["rows"] = [remove_infinites(row) for row in data["rows"]] # Deal with the _shape option - shape = args.get("_shape", "objects") + shape = args.get("_shape", "arrays") # if there's an error, ignore the shape entirely - data["ok"] = True - if error: - shape = "objects" - status_code = 400 - data["error"] = error - data["ok"] = False + if data.get("error"): + shape = "arrays" + + next_url = data.get("next_url") - if truncated is not None: - data["truncated"] = truncated if shape == "arrayfirst": - if not data["rows"]: - data = [] - elif isinstance(data["rows"][0], sqlite3.Row): - data = [row[0] for row in data["rows"]] - else: - assert isinstance(data["rows"][0], dict) - data = [next(iter(row.values())) for row in data["rows"]] + data = [row[0] for row in data["rows"]] elif shape in ("objects", "object", "array"): columns = data.get("columns") rows = data.get("rows") - if rows and columns and not isinstance(rows[0], dict): + if rows and columns: data["rows"] = [dict(zip(columns, row)) for row in rows] if shape == "object": - shape_error = None + error = None if "primary_keys" not in data: - shape_error = "_shape=object is only available on tables" + error = "_shape=object is only available on tables" else: pks = data["primary_keys"] if not pks: - shape_error = ( + error = ( "_shape=object not available for tables with no primary keys" ) else: @@ -85,18 +74,13 @@ def json_renderer(request, args, data, error, truncated=None): pk_string = path_from_row_pks(row, pks, not pks) object_rows[pk_string] = row data = object_rows - if shape_error: - data = {"ok": False, "error": shape_error} + if error: + data = {"ok": False, "error": error} elif shape == "array": data = data["rows"] elif shape == "arrays": - if not data["rows"]: - pass - elif isinstance(data["rows"][0], sqlite3.Row): - data["rows"] = [list(row) for row in data["rows"]] - else: - data["rows"] = [list(row.values()) for row in data["rows"]] + pass else: status_code = 400 data = { @@ -105,12 +89,6 @@ def json_renderer(request, args, data, error, truncated=None): "status": 400, "title": None, } - - # Don't include "columns" in output - # https://github.com/simonw/datasette/issues/2136 - if isinstance(data, dict) and "columns" not in request.args.getlist("_extra"): - data.pop("columns", None) - # Handle _nl option for _shape=array nl = args.get("_nl", "") if nl and shape == "array": @@ -120,6 +98,8 @@ def json_renderer(request, args, data, error, truncated=None): body = json.dumps(data, 
cls=CustomJSONEncoder) content_type = "application/json; charset=utf-8" headers = {} + if next_url: + headers["link"] = f'<{next_url}>; rel="next"' return Response( body, status=status_code, headers=headers, content_type=content_type ) diff --git a/datasette/resources.py b/datasette/resources.py deleted file mode 100644 index 641afb2f..00000000 --- a/datasette/resources.py +++ /dev/null @@ -1,90 +0,0 @@ -"""Core resource types for Datasette's permission system.""" - -from datasette.permissions import Resource - - -class DatabaseResource(Resource): - """A database in Datasette.""" - - name = "database" - parent_class = None # Top of the resource hierarchy - - def __init__(self, database: str): - super().__init__(parent=database, child=None) - - @classmethod - async def resources_sql(cls, datasette) -> str: - return """ - SELECT database_name AS parent, NULL AS child - FROM catalog_databases - """ - - -class TableResource(Resource): - """A table in a database.""" - - name = "table" - parent_class = DatabaseResource - - def __init__(self, database: str, table: str): - super().__init__(parent=database, child=table) - - @classmethod - async def resources_sql(cls, datasette) -> str: - return """ - SELECT database_name AS parent, table_name AS child - FROM catalog_tables - UNION ALL - SELECT database_name AS parent, view_name AS child - FROM catalog_views - """ - - -class QueryResource(Resource): - """A canned query in a database.""" - - name = "query" - parent_class = DatabaseResource - - def __init__(self, database: str, query: str): - super().__init__(parent=database, child=query) - - @classmethod - async def resources_sql(cls, datasette) -> str: - from datasette.plugins import pm - from datasette.utils import await_me_maybe - - # Get all databases from catalog - db = datasette.get_internal_database() - result = await db.execute("SELECT database_name FROM catalog_databases") - databases = [row[0] for row in result.rows] - - # Gather all canned queries from all databases - query_pairs = [] - for database_name in databases: - # Call the hook to get queries (including from config via default plugin) - for queries_result in pm.hook.canned_queries( - datasette=datasette, - database=database_name, - actor=None, # Get ALL queries for resource enumeration - ): - queries = await await_me_maybe(queries_result) - if queries: - for query_name in queries.keys(): - query_pairs.append((database_name, query_name)) - - # Build SQL - if not query_pairs: - return "SELECT NULL AS parent, NULL AS child WHERE 0" - - # Generate UNION ALL query - selects = [] - for db_name, query_name in query_pairs: - # Escape single quotes by doubling them - db_escaped = db_name.replace("'", "''") - query_escaped = query_name.replace("'", "''") - selects.append( - f"SELECT '{db_escaped}' AS parent, '{query_escaped}' AS child" - ) - - return " UNION ALL ".join(selects) diff --git a/datasette/static/app.css b/datasette/static/app.css index a3117152..712b9925 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -163,22 +163,28 @@ h6, } .page-header { + display: flex; + align-items: center; padding-left: 10px; border-left: 10px solid #666; margin-bottom: 0.75rem; margin-top: 1rem; } .page-header h1 { + display: inline; margin: 0; font-size: 2rem; padding-right: 0.2em; } - -.page-action-menu details > summary { +.page-header details { + display: inline-flex; +} +.page-header details > summary { list-style: none; + display: inline-flex; cursor: pointer; } -.page-action-menu details > summary::-webkit-details-marker { 
+.page-header details > summary::-webkit-details-marker { display: none; } @@ -222,6 +228,12 @@ button.button-as-link:focus { color: #67C98D; } +a img { + display: block; + max-width: 100%; + border: 0; +} + code, pre { font-family: monospace; @@ -259,28 +271,24 @@ a.not-underlined { /* Page Furniture ========================================================= */ /* Header */ -header.hd, -footer.ft { +header, +footer { padding: 0.6rem 1rem 0.5rem 1rem; background-color: #276890; - background: linear-gradient(180deg, rgba(96,144,173,1) 0%, rgba(39,104,144,1) 50%); color: rgba(255,255,244,0.9); overflow: hidden; box-sizing: border-box; min-height: 2.6rem; } -footer.ft { - margin-top: 1rem; -} -header.hd p, -footer.ft p { +header p, +footer p { margin: 0; padding: 0; } -header.hd .crumbs { +header .crumbs { float: left; } -header.hd .actor { +header .actor { float: right; text-align: right; padding-left: 1rem; @@ -289,32 +297,32 @@ header.hd .actor { top: -3px; } -footer.ft a:link, -footer.ft a:visited, -footer.ft a:hover, -footer.ft a:focus, -footer.ft a:active, -footer.ft button.button-as-link { +footer a:link, +footer a:visited, +footer a:hover, +footer a:focus, +footer a:active, +footer button.button-as-link { color: rgba(255,255,244,0.8); } -header.hd a:link, -header.hd a:visited, -header.hd a:hover, -header.hd a:focus, -header.hd a:active, -header.hd button.button-as-link { +header a:link, +header a:visited, +header a:hover, +header a:focus, +header a:active, +header button.button-as-link { color: rgba(255,255,244,0.8); text-decoration: none; } -footer.ft a:hover, -footer.ft a:focus, -footer.ft a:active, -footer.ft .button-as-link:hover, -footer.ft .button-as-link:focus, -header.hd a:hover, -header.hd a:focus, -header.hd a:active, +footer a:hover, +footer a:focus, +footer a:active, +footer.button-as-link:hover, +footer.button-as-link:focus, +header a:hover, +header a:focus, +header a:active, button.button-as-link:hover, button.button-as-link:focus { color: rgba(255,255,244,1); @@ -326,6 +334,11 @@ section.content { margin: 0 1rem; } +/* Footer */ +footer { + margin-top: 1rem; +} + /* Navigation menu */ details.nav-menu > summary { list-style: none; @@ -339,59 +352,25 @@ details.nav-menu > summary::-webkit-details-marker { } details .nav-menu-inner { position: absolute; - top: 2.6rem; + top: 2rem; right: 10px; width: 180px; background-color: #276890; + padding: 1rem; z-index: 1000; - padding: 0; -} -.nav-menu-inner li, -form.nav-menu-logout { - padding: 0.3rem 0.5rem; - border-top: 1px solid #ffffff69; } .nav-menu-inner a { display: block; } /* Table/database actions menu */ -.page-action-menu { +.page-header { position: relative; - margin-bottom: 0.5em; -} -.actions-menu-links { - display: inline; } .actions-menu-links .dropdown-menu { position: absolute; top: calc(100% + 10px); - left: 0; - z-index: 10000; -} -.page-action-menu .icon-text { - display: inline-flex; - align-items: center; - border-radius: .25rem; - padding: 5px 12px 3px 7px; - color: #fff; - font-weight: 400; - font-size: 0.8em; - background: linear-gradient(180deg, #007bff 0%, #4E79C7 100%); - border-color: #007bff; -} -.page-action-menu .icon-text span { - /* Nudge text up a bit */ - position: relative; - top: -2px; -} -.page-action-menu .icon-text:hover { - cursor: pointer; -} -.page-action-menu .icon { - width: 18px; - height: 18px; - margin-right: 4px; + left: -10px; } /* Components ============================================================== */ @@ -444,30 +423,36 @@ h2 em { .table-wrapper { overflow-x: auto; } 
-table.rows-and-columns { +table { border-collapse: collapse; } -table.rows-and-columns td { +td { border-top: 1px solid #aaa; border-right: 1px solid #eee; padding: 4px; vertical-align: top; white-space: pre-wrap; } -table.rows-and-columns td.type-pk { +td.type-pk { font-weight: bold; } -table.rows-and-columns td em { +td em { font-style: normal; font-size: 0.8em; color: #aaa; } -table.rows-and-columns th { +th { padding-right: 1em; } -table.rows-and-columns a:link { +table a:link { text-decoration: none; } +.rows-and-columns td:before { + display: block; + color: black; + margin-left: -10%; + font-size: 0.8em; +} .rows-and-columns td ol, .rows-and-columns td ul { list-style: initial; @@ -485,8 +470,10 @@ a.blob-download { margin-bottom: 0; } + /* Forms =================================================================== */ + form.sql textarea { border: 1px solid #ccc; width: 70%; @@ -495,30 +482,27 @@ form.sql textarea { font-family: monospace; font-size: 1.3em; } -form.sql label { +form label { + font-weight: bold; + display: inline-block; width: 15%; } +.advanced-export form label { + width: auto; +} .advanced-export input[type=submit] { font-size: 0.6em; margin-left: 1em; } label.sort_by_desc { + width: auto; padding-right: 1em; } pre#sql-query { margin-bottom: 1em; } - -.core label, -label.core { - font-weight: bold; - display: inline-block; -} - -.core input[type=text], -input.core[type=text], -.core input[type=search], -input.core[type=search] { +form input[type=text], +form input[type=search] { border: 1px solid #ccc; border-radius: 3px; width: 60%; @@ -527,27 +511,19 @@ input.core[type=search] { font-size: 1em; font-family: Helvetica, sans-serif; } -.core input[type=search], -input.core[type=search] { - /* Stop Webkit from styling search boxes in an inconsistent way */ - /* https://css-tricks.com/webkit-html5-search-inputs/ comments */ +/* Stop Webkit from styling search boxes in an inconsistent way */ +/* https://css-tricks.com/webkit-html5-search-inputs/ comments */ +input[type=search] { -webkit-appearance: textfield; } -.core input[type="search"]::-webkit-search-decoration, -input.core[type="search"]::-webkit-search-decoration, -.core input[type="search"]::-webkit-search-cancel-button, -input.core[type="search"]::-webkit-search-cancel-button, -.core input[type="search"]::-webkit-search-results-button, -input.core[type="search"]::-webkit-search-results-button, -.core input[type="search"]::-webkit-search-results-decoration, -input.core[type="search"]::-webkit-search-results-decoration { +input[type="search"]::-webkit-search-decoration, +input[type="search"]::-webkit-search-cancel-button, +input[type="search"]::-webkit-search-results-button, +input[type="search"]::-webkit-search-results-decoration { display: none; } -.core input[type=submit], -.core button[type=button], -input.core[type=submit], -button.core[type=button] { +form input[type=submit], form button[type=button] { font-weight: 400; cursor: pointer; text-align: center; @@ -560,16 +536,14 @@ button.core[type=button] { border-radius: .25rem; } -.core input[type=submit], -input.core[type=submit] { +form input[type=submit] { color: #fff; - background: linear-gradient(180deg, #007bff 0%, #4E79C7 100%); + background-color: #007bff; border-color: #007bff; -webkit-appearance: button; } -.core button[type=button], -button.core[type=button] { +form button[type=button] { color: #007bff; background-color: #fff; border-color: #007bff; @@ -599,9 +573,6 @@ button.core[type=button] { display: inline-block; margin-right: 0.3em; } 
-.select-wrapper:focus-within { - border: 1px solid black; -} .select-wrapper.filter-op { width: 80px; } @@ -759,7 +730,7 @@ p.zero-results { left: -9999px; } - table.rows-and-columns tr { + .rows-and-columns tr { border: 1px solid #ccc; margin-bottom: 1em; border-radius: 10px; @@ -767,7 +738,7 @@ p.zero-results { padding: 0.2rem; } - table.rows-and-columns td { + .rows-and-columns td { /* Behave like a "row" */ border: none; border-bottom: 1px solid #eee; @@ -775,7 +746,7 @@ p.zero-results { padding-left: 10%; } - table.rows-and-columns td:before { + .rows-and-columns td:before { display: block; color: black; margin-left: -10%; @@ -847,13 +818,6 @@ svg.dropdown-menu-icon { .dropdown-menu a:hover { background-color: #eee; } -.dropdown-menu .dropdown-description { - margin: 0; - color: #666; - font-size: 0.8em; - max-width: 80vw; - white-space: normal; -} .dropdown-menu .hook { display: block; position: absolute; diff --git a/datasette/static/datasette-manager.js b/datasette/static/datasette-manager.js deleted file mode 100644 index d2347ab3..00000000 --- a/datasette/static/datasette-manager.js +++ /dev/null @@ -1,210 +0,0 @@ -// Custom events for use with the native CustomEvent API -const DATASETTE_EVENTS = { - INIT: "datasette_init", // returns datasette manager instance in evt.detail -}; - -// Datasette "core" -> Methods/APIs that are foundational -// Plugins will have greater stability if they use the functional hooks- but if they do decide to hook into -// literal DOM selectors, they'll have an easier time using these addresses. -const DOM_SELECTORS = { - /** Should have one match */ - jsonExportLink: ".export-links a[href*=json]", - - /** Event listeners that go outside of the main table, e.g. existing scroll listener */ - tableWrapper: ".table-wrapper", - table: "table.rows-and-columns", - aboveTablePanel: ".above-table-panel", - - // These could have multiple matches - /** Used for selecting table headers. Use makeColumnActions if you want to add menu items. */ - tableHeaders: `table.rows-and-columns th`, - - /** Used to add "where" clauses to query using direct manipulation */ - filterRows: ".filter-row", - /** Used to show top available enum values for a column ("facets") */ - facetResults: ".facet-results [data-column]", -}; - -/** - * Monolith class for interacting with Datasette JS API - * Imported with DEFER, runs after main document parsed - * For now, manually synced with datasette/version.py - */ -const datasetteManager = { - VERSION: window.datasetteVersion, - - // TODO: Should order of registration matter more? - - // Should plugins be allowed to clobber others or is it last-in takes priority? - // Does pluginMetadata need to be serializable, or can we let it be stateful / have functions? - plugins: new Map(), - - registerPlugin: (name, pluginMetadata) => { - if (datasetteManager.plugins.has(name)) { - console.warn(`Warning -> plugin ${name} was redefined`); - } - datasetteManager.plugins.set(name, pluginMetadata); - - // If the plugin participates in the panel... update the panel. - if (pluginMetadata.makeAboveTablePanelConfigs) { - datasetteManager.renderAboveTablePanel(); - } - }, - - /** - * New DOM elements are created on each click, so the data is not stale. 
- * - * Items - * - must provide label (text) - * - might provide href (string) or an onclick ((evt) => void) - * - * columnMeta is metadata stored on the column header (TH) as a DOMStringMap - * - column: string - * - columnNotNull: boolean - * - columnType: sqlite datatype enum (text, number, etc) - * - isPk: boolean - */ - makeColumnActions: (columnMeta) => { - let columnActions = []; - - // Accept function that returns list of columnActions with keys - // Required: label (text) - // Optional: onClick or href - datasetteManager.plugins.forEach((plugin) => { - if (plugin.makeColumnActions) { - // Plugins can provide multiple columnActions if they want - // If multiple try to create entry with same label, the last one deletes the others - columnActions.push(...plugin.makeColumnActions(columnMeta)); - } - }); - - // TODO: Validate columnAction configs and give informative error message if missing keys. - return columnActions; - }, - - /** - * In MVP, each plugin can only have 1 instance. - * In future, panels could be repeated. We omit that for now since so many plugins depend on - * shared URL state, so having multiple instances of plugin at same time is problematic. - * Currently, we never destroy any panels, we just hide them. - * - * TODO: nicer panel css, show panel selection state. - * TODO: does this hook need to take any arguments? - */ - renderAboveTablePanel: () => { - const aboveTablePanel = document.querySelector( - DOM_SELECTORS.aboveTablePanel, - ); - - if (!aboveTablePanel) { - console.warn( - "This page does not have a table, the renderAboveTablePanel cannot be used.", - ); - return; - } - - let aboveTablePanelWrapper = aboveTablePanel.querySelector(".panels"); - - // First render: create wrappers. Otherwise, reuse previous. - if (!aboveTablePanelWrapper) { - aboveTablePanelWrapper = document.createElement("div"); - aboveTablePanelWrapper.classList.add("tab-contents"); - const panelNav = document.createElement("div"); - panelNav.classList.add("tab-controls"); - - // Temporary: css for minimal amount of breathing room. - panelNav.style.display = "flex"; - panelNav.style.gap = "8px"; - panelNav.style.marginTop = "4px"; - panelNav.style.marginBottom = "20px"; - - aboveTablePanel.appendChild(panelNav); - aboveTablePanel.appendChild(aboveTablePanelWrapper); - } - - datasetteManager.plugins.forEach((plugin, pluginName) => { - const { makeAboveTablePanelConfigs } = plugin; - - if (makeAboveTablePanelConfigs) { - const controls = aboveTablePanel.querySelector(".tab-controls"); - const contents = aboveTablePanel.querySelector(".tab-contents"); - - // Each plugin can make multiple panels - const configs = makeAboveTablePanelConfigs(); - - configs.forEach((config, i) => { - const nodeContentId = `${pluginName}_${config.id}_panel-content`; - - // quit if we've already registered this plugin - // TODO: look into whether plugins should be allowed to ask - // parent to re-render, or if they should manage that internally. 
- if (document.getElementById(nodeContentId)) { - return; - } - - // Add tab control button - const pluginControl = document.createElement("button"); - pluginControl.textContent = config.label; - pluginControl.onclick = () => { - contents.childNodes.forEach((node) => { - if (node.id === nodeContentId) { - node.style.display = "block"; - } else { - node.style.display = "none"; - } - }); - }; - controls.appendChild(pluginControl); - - // Add plugin content area - const pluginNode = document.createElement("div"); - pluginNode.id = nodeContentId; - config.render(pluginNode); - pluginNode.style.display = "none"; // Default to hidden unless you're ifrst - - contents.appendChild(pluginNode); - }); - - // Let first node be selected by default - if (contents.childNodes.length) { - contents.childNodes[0].style.display = "block"; - } - } - }); - }, - - /** Selectors for document (DOM) elements. Store identifier instead of immediate references in case they haven't loaded when Manager starts. */ - selectors: DOM_SELECTORS, - - // Future API ideas - // Fetch page's data in array, and cache so plugins could reuse it - // Provide knowledge of what datasette JS or server-side via traditional console autocomplete - // State helpers: URL params https://github.com/simonw/datasette/issues/1144 and localstorage - // UI Hooks: command + k, tab manager hook - // Should we notify plugins that have dependencies - // when all dependencies were fulfilled? (leaflet, codemirror, etc) - // https://github.com/simonw/datasette-leaflet -> this way - // multiple plugins can all request the same copy of leaflet. -}; - -const initializeDatasette = () => { - // Hide the global behind __ prefix. Ideally they should be listening for the - // DATASETTE_EVENTS.INIT event to avoid the habit of reading from the window. - - window.__DATASETTE__ = datasetteManager; - console.debug("Datasette Manager Created!"); - - const initDatasetteEvent = new CustomEvent(DATASETTE_EVENTS.INIT, { - detail: datasetteManager, - }); - - document.dispatchEvent(initDatasetteEvent); -}; - -/** - * Main function - * Fires AFTER the document has been parsed - */ -document.addEventListener("DOMContentLoaded", function () { - initializeDatasette(); -}); diff --git a/datasette/static/json-format-highlight-1.0.1.js b/datasette/static/json-format-highlight-1.0.1.js index 0e6e2c29..d83b8186 100644 --- a/datasette/static/json-format-highlight-1.0.1.js +++ b/datasette/static/json-format-highlight-1.0.1.js @@ -7,8 +7,8 @@ MIT Licensed typeof exports === "object" && typeof module !== "undefined" ? (module.exports = factory()) : typeof define === "function" && define.amd - ? define(factory) - : (global.jsonFormatHighlight = factory()); + ? define(factory) + : (global.jsonFormatHighlight = factory()); })(this, function () { "use strict"; @@ -42,13 +42,13 @@ MIT Licensed color = /true/.test(match) ? colors.trueColor : /false/.test(match) - ? colors.falseColor - : /null/.test(match) - ? colors.nullColor - : color; + ? colors.falseColor + : /null/.test(match) + ? 
colors.nullColor + : color; } return '' + match + ""; - }, + } ); } diff --git a/datasette/static/navigation-search.js b/datasette/static/navigation-search.js deleted file mode 100644 index 48de5c4f..00000000 --- a/datasette/static/navigation-search.js +++ /dev/null @@ -1,416 +0,0 @@ -class NavigationSearch extends HTMLElement { - constructor() { - super(); - this.attachShadow({ mode: "open" }); - this.selectedIndex = -1; - this.matches = []; - this.debounceTimer = null; - - this.render(); - this.setupEventListeners(); - } - - render() { - this.shadowRoot.innerHTML = ` - - - -
-
- -
-
-
- Navigate - Enter Select - Esc Close -
-
-
- `; - } - - setupEventListeners() { - const dialog = this.shadowRoot.querySelector("dialog"); - const input = this.shadowRoot.querySelector(".search-input"); - const resultsContainer = - this.shadowRoot.querySelector(".results-container"); - - // Global keyboard listener for "/" - document.addEventListener("keydown", (e) => { - if (e.key === "/" && !this.isInputFocused() && !dialog.open) { - e.preventDefault(); - this.openMenu(); - } - }); - - // Input event - input.addEventListener("input", (e) => { - this.handleSearch(e.target.value); - }); - - // Keyboard navigation - input.addEventListener("keydown", (e) => { - if (e.key === "ArrowDown") { - e.preventDefault(); - this.moveSelection(1); - } else if (e.key === "ArrowUp") { - e.preventDefault(); - this.moveSelection(-1); - } else if (e.key === "Enter") { - e.preventDefault(); - this.selectCurrentItem(); - } else if (e.key === "Escape") { - this.closeMenu(); - } - }); - - // Click on result item - resultsContainer.addEventListener("click", (e) => { - const item = e.target.closest(".result-item"); - if (item) { - const index = parseInt(item.dataset.index); - this.selectItem(index); - } - }); - - // Close on backdrop click - dialog.addEventListener("click", (e) => { - if (e.target === dialog) { - this.closeMenu(); - } - }); - - // Initial load - this.loadInitialData(); - } - - isInputFocused() { - const activeElement = document.activeElement; - return ( - activeElement && - (activeElement.tagName === "INPUT" || - activeElement.tagName === "TEXTAREA" || - activeElement.isContentEditable) - ); - } - - loadInitialData() { - const itemsAttr = this.getAttribute("items"); - if (itemsAttr) { - try { - this.allItems = JSON.parse(itemsAttr); - this.matches = this.allItems; - } catch (e) { - console.error("Failed to parse items attribute:", e); - this.allItems = []; - this.matches = []; - } - } - } - - handleSearch(query) { - clearTimeout(this.debounceTimer); - - this.debounceTimer = setTimeout(() => { - const url = this.getAttribute("url"); - - if (url) { - // Fetch from API - this.fetchResults(url, query); - } else { - // Filter local items - this.filterLocalItems(query); - } - }, 200); - } - - async fetchResults(url, query) { - try { - const searchUrl = `${url}?q=${encodeURIComponent(query)}`; - const response = await fetch(searchUrl); - const data = await response.json(); - this.matches = data.matches || []; - this.selectedIndex = this.matches.length > 0 ? 0 : -1; - this.renderResults(); - } catch (e) { - console.error("Failed to fetch search results:", e); - this.matches = []; - this.renderResults(); - } - } - - filterLocalItems(query) { - if (!query.trim()) { - this.matches = []; - } else { - const lowerQuery = query.toLowerCase(); - this.matches = (this.allItems || []).filter( - (item) => - item.name.toLowerCase().includes(lowerQuery) || - item.url.toLowerCase().includes(lowerQuery), - ); - } - this.selectedIndex = this.matches.length > 0 ? 0 : -1; - this.renderResults(); - } - - renderResults() { - const container = this.shadowRoot.querySelector(".results-container"); - const input = this.shadowRoot.querySelector(".search-input"); - - if (this.matches.length === 0) { - const message = input.value.trim() - ? "No results found" - : "Start typing to search..."; - container.innerHTML = `
${message}
`; - return; - } - - container.innerHTML = this.matches - .map( - (match, index) => ` -
-
-
${this.escapeHtml( - match.name, - )}
-
${this.escapeHtml(match.url)}
-
-
- `, - ) - .join(""); - - // Scroll selected item into view - if (this.selectedIndex >= 0) { - const selectedItem = container.children[this.selectedIndex]; - if (selectedItem) { - selectedItem.scrollIntoView({ block: "nearest" }); - } - } - } - - moveSelection(direction) { - const newIndex = this.selectedIndex + direction; - if (newIndex >= 0 && newIndex < this.matches.length) { - this.selectedIndex = newIndex; - this.renderResults(); - } - } - - selectCurrentItem() { - if (this.selectedIndex >= 0 && this.selectedIndex < this.matches.length) { - this.selectItem(this.selectedIndex); - } - } - - selectItem(index) { - const match = this.matches[index]; - if (match) { - // Dispatch custom event - this.dispatchEvent( - new CustomEvent("select", { - detail: match, - bubbles: true, - composed: true, - }), - ); - - // Navigate to URL - window.location.href = match.url; - - this.closeMenu(); - } - } - - openMenu() { - const dialog = this.shadowRoot.querySelector("dialog"); - const input = this.shadowRoot.querySelector(".search-input"); - - dialog.showModal(); - input.value = ""; - input.focus(); - - // Reset state - start with no items shown - this.matches = []; - this.selectedIndex = -1; - this.renderResults(); - } - - closeMenu() { - const dialog = this.shadowRoot.querySelector("dialog"); - dialog.close(); - } - - escapeHtml(text) { - const div = document.createElement("div"); - div.textContent = text; - return div.innerHTML; - } -} - -// Register the custom element -customElements.define("navigation-search", NavigationSearch); diff --git a/datasette/static/table.js b/datasette/static/table.js index 0caeeb91..51e901a5 100644 --- a/datasette/static/table.js +++ b/datasette/static/table.js @@ -17,8 +17,7 @@ var DROPDOWN_ICON_SVG = ` `; -/** Main initialization function for Datasette Table interactions */ -const initDatasetteTable = function (manager) { +(function () { // Feature detection if (!window.URLSearchParams) { return; @@ -69,11 +68,13 @@ const initDatasetteTable = function (manager) { menu.style.display = "none"; menu.classList.remove("anim-scale-in"); } - - const tableWrapper = document.querySelector(manager.selectors.tableWrapper); - if (tableWrapper) { - tableWrapper.addEventListener("scroll", closeMenu); - } + // When page loads, add scroll listener on .table-wrapper + document.addEventListener("DOMContentLoaded", () => { + var tableWrapper = document.querySelector(".table-wrapper"); + if (tableWrapper) { + tableWrapper.addEventListener("scroll", closeMenu); + } + }); document.body.addEventListener("click", (ev) => { /* was this click outside the menu? 
*/ var target = ev.target; @@ -84,11 +85,9 @@ const initDatasetteTable = function (manager) { closeMenu(); } }); - - function onTableHeaderClick(ev) { + function iconClicked(ev) { ev.preventDefault(); ev.stopPropagation(); - menu.innerHTML = DROPDOWN_HTML; var th = ev.target; while (th.nodeName != "TH") { th = th.parentNode; @@ -132,7 +131,7 @@ const initDatasetteTable = function (manager) { /* Only show "Facet by this" if it's not the first column, not selected, not a single PK and the Datasette allow_facet setting is True */ var displayedFacets = Array.from( - document.querySelectorAll(".facet-info"), + document.querySelectorAll(".facet-info") ).map((el) => el.dataset.column); var isFirstColumn = th.parentElement.querySelector("th:first-of-type") == th; @@ -152,7 +151,7 @@ const initDatasetteTable = function (manager) { } /* Show notBlank option if not selected AND at least one visible blank value */ var tdsForThisColumn = Array.from( - th.closest("table").querySelectorAll("td." + th.className), + th.closest("table").querySelectorAll("td." + th.className) ); if ( params.get(`${column}__notblank`) != "1" && @@ -186,61 +185,7 @@ const initDatasetteTable = function (manager) { menu.style.left = menuLeft + "px"; menu.style.display = "block"; menu.classList.add("anim-scale-in"); - - // Custom menu items on each render - // Plugin hook: allow adding JS-based additional menu items - const columnActionsPayload = { - columnName: th.dataset.column, - columnNotNull: th.dataset.columnNotNull === "1", - columnType: th.dataset.columnType, - isPk: th.dataset.isPk === "1", - }; - const columnItemConfigs = manager.makeColumnActions(columnActionsPayload); - - const menuList = menu.querySelector("ul"); - columnItemConfigs.forEach((itemConfig) => { - // Remove items from previous render. We assume entries have unique labels. - const existingItems = menuList.querySelectorAll(`li`); - Array.from(existingItems) - .filter((item) => item.innerText === itemConfig.label) - .forEach((node) => { - node.remove(); - }); - - const newLink = document.createElement("a"); - newLink.textContent = itemConfig.label; - newLink.href = itemConfig.href ?? 
"#"; - if (itemConfig.onClick) { - newLink.onclick = itemConfig.onClick; - } - - // Attach new elements to DOM - const menuItem = document.createElement("li"); - menuItem.appendChild(newLink); - menuList.appendChild(menuItem); - }); - - // Measure width of menu and adjust position if too far right - const menuWidth = menu.offsetWidth; - const windowWidth = window.innerWidth; - if (menuLeft + menuWidth > windowWidth) { - menu.style.left = windowWidth - menuWidth - 20 + "px"; - } - // Align menu .hook arrow with the column cog icon - const hook = menu.querySelector(".hook"); - const icon = th.querySelector(".dropdown-menu-icon"); - const iconRect = icon.getBoundingClientRect(); - const hookLeft = iconRect.left - menuLeft + 1 + "px"; - hook.style.left = hookLeft; - // Move the whole menu right if the hook is too far right - const menuRect = menu.getBoundingClientRect(); - if (iconRect.right > menuRect.right) { - menu.style.left = iconRect.right - menuWidth + "px"; - // And move hook tip as well - hook.style.left = menuWidth - 13 + "px"; - } } - var svg = document.createElement("div"); svg.innerHTML = DROPDOWN_ICON_SVG; svg = svg.querySelector("*"); @@ -252,25 +197,23 @@ const initDatasetteTable = function (manager) { menu.style.display = "none"; document.body.appendChild(menu); - var ths = Array.from( - document.querySelectorAll(manager.selectors.tableHeaders), - ); + var ths = Array.from(document.querySelectorAll(".rows-and-columns th")); ths.forEach((th) => { if (!th.querySelector("a")) { return; } var icon = svg.cloneNode(true); - icon.addEventListener("click", onTableHeaderClick); + icon.addEventListener("click", iconClicked); th.appendChild(icon); }); -}; +})(); /* Add x buttons to the filter rows */ -function addButtonsToFilterRows(manager) { +(function () { var x = "✖"; - var rows = Array.from( - document.querySelectorAll(manager.selectors.filterRow), - ).filter((el) => el.querySelector(".filter-op")); + var rows = Array.from(document.querySelectorAll(".filter-row")).filter((el) => + el.querySelector(".filter-op") + ); rows.forEach((row) => { var a = document.createElement("a"); a.setAttribute("href", "#"); @@ -291,18 +234,18 @@ function addButtonsToFilterRows(manager) { a.style.display = "none"; } }); -} +})(); /* Set up datalist autocomplete for filter values */ -function initAutocompleteForFilterValues(manager) { +(function () { function createDataLists() { var facetResults = document.querySelectorAll( - manager.selectors.facetResults, + ".facet-results [data-column]" ); Array.from(facetResults).forEach(function (facetResult) { // Use link text from all links in the facet result var links = Array.from( - facetResult.querySelectorAll("li:not(.facet-truncated) a"), + facetResult.querySelectorAll("li:not(.facet-truncated) a") ); // Create a datalist element var datalist = document.createElement("datalist"); @@ -323,21 +266,9 @@ function initAutocompleteForFilterValues(manager) { document.body.addEventListener("change", function (event) { if (event.target.name === "_filter_column") { event.target - .closest(manager.selectors.filterRow) + .closest(".filter-row") .querySelector(".filter-value") .setAttribute("list", "datalist-" + event.target.value); } }); -} - -// Ensures Table UI is initialized only after the Manager is ready. 
-document.addEventListener("datasette_init", function (evt) { - const { detail: manager } = evt; - - // Main table - initDatasetteTable(manager); - - // Other UI functions with interactive JS needs - addButtonsToFilterRows(manager); - initAutocompleteForFilterValues(manager); -}); +})(); diff --git a/datasette/templates/_action_menu.html b/datasette/templates/_action_menu.html deleted file mode 100644 index 7d1d4a55..00000000 --- a/datasette/templates/_action_menu.html +++ /dev/null @@ -1,28 +0,0 @@ -{% if action_links %} -
- -
-{% endif %} \ No newline at end of file diff --git a/datasette/templates/_debug_common_functions.html b/datasette/templates/_debug_common_functions.html deleted file mode 100644 index d988a2f3..00000000 --- a/datasette/templates/_debug_common_functions.html +++ /dev/null @@ -1,50 +0,0 @@ - diff --git a/datasette/templates/_description_source_license.html b/datasette/templates/_description_source_license.html index f852268f..a2bc18f2 100644 --- a/datasette/templates/_description_source_license.html +++ b/datasette/templates/_description_source_license.html @@ -1,6 +1,6 @@ -{% if metadata.get("description_html") or metadata.get("description") %} +{% if metadata.description_html or metadata.description %}
diff --git a/datasette/templates/allow_debug.html b/datasette/templates/allow_debug.html index 1ecc92df..04181531 100644 --- a/datasette/templates/allow_debug.html +++ b/datasette/templates/allow_debug.html @@ -33,12 +33,9 @@ p.message-warning {

Debug allow rules

-{% set current_tab = "allow_debug" %} -{% include "_permissions_debug_tabs.html" %} -

Use this tool to try out different actor and allow combinations. See Defining permissions with "allow" blocks for documentation.

- +

diff --git a/datasette/templates/api_explorer.html b/datasette/templates/api_explorer.html index dc393c20..ea95c023 100644 --- a/datasette/templates/api_explorer.html +++ b/datasette/templates/api_explorer.html @@ -8,7 +8,7 @@ {% block content %} -

API Explorer{% if private %} 🔒{% endif %}

+

API Explorer

Use this tool to try out the {% if datasette_version %} @@ -19,7 +19,7 @@

GET - +
@@ -29,7 +29,7 @@
POST - +
diff --git a/datasette/templates/base.html b/datasette/templates/base.html index 0d89e11c..4b763398 100644 --- a/datasette/templates/base.html +++ b/datasette/templates/base.html @@ -1,16 +1,14 @@ {% import "_crumbs.html" as crumbs with context %} - + {% block title %}{% endblock %} {% for url in extra_css_urls %} - + {% endfor %} - - {% for url in extra_js_urls %} - + {% endfor %} {%- if alternate_url_json -%} @@ -19,7 +17,7 @@
- - - - - - - - - - - - {% for check in permission_checks %} - - - - - - - - - {% endfor %} - -
WhenActionParentChildActorResult
{{ check.when.split('T', 1)[0] }}
{{ check.when.split('T', 1)[1].split('+', 1)[0].split('-', 1)[0].split('Z', 1)[0] }}
{{ check.action }}{{ check.parent or '—' }}{{ check.child or '—' }}{% if check.actor %}{{ check.actor|tojson }}{% else %}anonymous{% endif %}{% if check.result %}Allowed{% elif check.result is none %}No opinion{% else %}Denied{% endif %}
-{% else %} -

No permission checks have been recorded yet.

-{% endif %} - -{% endblock %} diff --git a/datasette/templates/debug_rules.html b/datasette/templates/debug_rules.html deleted file mode 100644 index 9a290803..00000000 --- a/datasette/templates/debug_rules.html +++ /dev/null @@ -1,203 +0,0 @@ -{% extends "base.html" %} - -{% block title %}Permission Rules{% endblock %} - -{% block extra_head %} - -{% include "_permission_ui_styles.html" %} -{% include "_debug_common_functions.html" %} -{% endblock %} - -{% block content %} -

Permission rules

- -{% set current_tab = "rules" %} -{% include "_permissions_debug_tabs.html" %} - -

Use this tool to view the permission rules that allow the current actor to access resources for a given permission action. It queries the /-/rules.json API endpoint.

- -{% if request.actor %} -

Current actor: {{ request.actor.get("id", "anonymous") }}

-{% else %} -

Current actor: anonymous (not logged in)

-{% endif %} - -
-
-
- - - The permission action to check -
- -
- - - Number of results per page (max 200) -
- -
- -
-
-
- - - - - -{% endblock %} diff --git a/datasette/templates/index.html b/datasette/templates/index.html index 03349279..06e09635 100644 --- a/datasette/templates/index.html +++ b/datasette/templates/index.html @@ -2,26 +2,17 @@ {% block title %}{{ metadata.title or "Datasette" }}: {% for database in databases %}{{ database.name }}{% if not loop.last %}, {% endif %}{% endfor %}{% endblock %} -{% block extra_head %} -{% if noindex %}{% endif %} -{% endblock %} - {% block body_class %}index{% endblock %} {% block content %}

{{ metadata.title or "Datasette" }}{% if private %} 🔒{% endif %}

-{% set action_links, action_title = homepage_actions, "Homepage actions" %} -{% include "_action_menu.html" %} - -{{ top_homepage() }} - {% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %} {% for database in databases %}

{{ database.name }}{% if database.private %} 🔒{% endif %}

- {% if database.show_table_row_counts %}{{ "{:,}".format(database.table_rows_sum) }} rows in {% endif %}{{ database.tables_count }} table{% if database.tables_count != 1 %}s{% endif %}{% if database.hidden_tables_count %}, {% endif -%} + {% if database.show_table_row_counts %}{{ "{:,}".format(database.table_rows_sum) }} rows in {% endif %}{{ database.tables_count }} table{% if database.tables_count != 1 %}s{% endif %}{% if database.tables_count and database.hidden_tables_count %}, {% endif -%} {% if database.hidden_tables_count -%} {% if database.show_table_row_counts %}{{ "{:,}".format(database.hidden_table_rows_sum) }} rows in {% endif %}{{ database.hidden_tables_count }} hidden table{% if database.hidden_tables_count != 1 %}s{% endif -%} {% endif -%} diff --git a/datasette/templates/logout.html b/datasette/templates/logout.html index c8fc642a..4c4a7d11 100644 --- a/datasette/templates/logout.html +++ b/datasette/templates/logout.html @@ -8,7 +8,7 @@

You are logged in as {{ display_actor(actor) }}

-
+
diff --git a/datasette/templates/messages_debug.html b/datasette/templates/messages_debug.html index 2940cd69..e0ab9a40 100644 --- a/datasette/templates/messages_debug.html +++ b/datasette/templates/messages_debug.html @@ -8,7 +8,7 @@

Set a message:

- +
diff --git a/datasette/templates/patterns.html b/datasette/templates/patterns.html index 7770f7d4..9905df2c 100644 --- a/datasette/templates/patterns.html +++ b/datasette/templates/patterns.html @@ -1,5 +1,5 @@ - + Datasette: Pattern Portfolio @@ -9,7 +9,7 @@ -
@@ -45,7 +45,7 @@

Header for /database/table/row and Messages

-
+