Mirror of https://github.com/simonw/datasette.git (synced 2025-12-10 16:51:24 +01:00)

Compare commits: main...prepare_as (1 commit)

Commit 93bfa26bfd: 278 changed files with 19857 additions and 56441 deletions
2 .coveragerc
@@ -1,2 +0,0 @@
-[run]
-omit = datasette/_version.py, datasette/utils/shutil_backport.py
3 .dockerignore
@@ -3,11 +3,10 @@
 .eggs
 .gitignore
 .ipynb_checkpoints
+.travis.yml
 build
 *.spec
 *.egg-info
 dist
 scratchpad
 venv
-*.db
-*.sqlite
4 .git-blame-ignore-revs
@@ -1,4 +0,0 @@
-# Applying Black
-35d6ee2790e41e96f243c1ff58be0c9c0519a8ce
-368638555160fb9ac78f462d0f79b1394163fa30
-2b344f6a34d2adaa305996a1a580ece06397f6e4
1 .gitattributes vendored
@@ -1 +1,2 @@
+datasette/_version.py export-subst
 datasette/static/codemirror-* linguist-vendored
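The restored `export-subst` attribute works together with the versioneer-generated `datasette/_version.py` added at the end of this diff: when `git archive` builds a tarball, git expands the `$Format:...$` placeholders in that file, so the version can be recovered even without the `.git` directory. The relevant function, as it appears in the `_version.py` hunk below (the expansion examples in the comments are illustrative assumptions, not taken from this diff):

def get_keywords():
    """Get the keywords needed to look up the version information."""
    # These strings are replaced by git during git-archive, because
    # .gitattributes marks datasette/_version.py with export-subst
    git_refnames = "$Format:%d$"  # e.g. " (tag: 0.28)" after expansion (illustrative)
    git_full = "$Format:%H$"  # the full 40-character commit SHA
    git_date = "$Format:%ci$"  # the committer date
    keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
    return keywords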
1 .github/FUNDING.yml vendored
@@ -1 +0,0 @@
-github: [simonw]
11 .github/dependabot.yml vendored
@@ -1,11 +0,0 @@
-version: 2
-updates:
-  - package-ecosystem: pip
-    directory: "/"
-    schedule:
-      interval: daily
-      time: "13:00"
-    groups:
-      python-packages:
-        patterns:
-          - "*"
35 .github/workflows/deploy-branch-preview.yml vendored
@@ -1,35 +0,0 @@
-name: Deploy a Datasette branch preview to Vercel
-
-on:
-  workflow_dispatch:
-    inputs:
-      branch:
-        description: "Branch to deploy"
-        required: true
-        type: string
-
-jobs:
-  deploy-branch-preview:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v3
-      - name: Set up Python 3.11
-        uses: actions/setup-python@v6
-        with:
-          python-version: "3.11"
-      - name: Install dependencies
-        run: |
-          pip install datasette-publish-vercel
-      - name: Deploy the preview
-        env:
-          VERCEL_TOKEN: ${{ secrets.BRANCH_PREVIEW_VERCEL_TOKEN }}
-        run: |
-          export BRANCH="${{ github.event.inputs.branch }}"
-          wget https://latest.datasette.io/fixtures.db
-          datasette publish vercel fixtures.db \
-            --branch $BRANCH \
-            --project "datasette-preview-$BRANCH" \
-            --token $VERCEL_TOKEN \
-            --scope datasette \
-            --about "Preview of $BRANCH" \
-            --about_url "https://github.com/simonw/datasette/tree/$BRANCH"
132 .github/workflows/deploy-latest.yml vendored
@@ -1,132 +0,0 @@
-name: Deploy latest.datasette.io
-
-on:
-  workflow_dispatch:
-  push:
-    branches:
-      - main
-    # - 1.0-dev
-
-permissions:
-  contents: read
-
-jobs:
-  deploy:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Check out datasette
-        uses: actions/checkout@v5
-      - name: Set up Python
-        uses: actions/setup-python@v6
-        with:
-          python-version: "3.13"
-          cache: pip
-      - name: Install Python dependencies
-        run: |
-          python -m pip install --upgrade pip
-          python -m pip install -e .[test]
-          python -m pip install -e .[docs]
-          python -m pip install sphinx-to-sqlite==0.1a1
-      - name: Run tests
-        if: ${{ github.ref == 'refs/heads/main' }}
-        run: |
-          pytest -n auto -m "not serial"
-          pytest -m "serial"
-      - name: Build fixtures.db and other files needed to deploy the demo
-        run: |-
-          python tests/fixtures.py \
-            fixtures.db \
-            fixtures-config.json \
-            fixtures-metadata.json \
-            plugins \
-            --extra-db-filename extra_database.db
-      - name: Build docs.db
-        if: ${{ github.ref == 'refs/heads/main' }}
-        run: |-
-          cd docs
-          DISABLE_SPHINX_INLINE_TABS=1 sphinx-build -b xml . _build
-          sphinx-to-sqlite ../docs.db _build
-          cd ..
-      - name: Set up the alternate-route demo
-        run: |
-          echo '
-          from datasette import hookimpl
-
-          @hookimpl
-          def startup(datasette):
-              db = datasette.get_database("fixtures2")
-              db.route = "alternative-route"
-          ' > plugins/alternative_route.py
-          cp fixtures.db fixtures2.db
-      - name: And the counters writable canned query demo
-        run: |
-          cat > plugins/counters.py <<EOF
-          from datasette import hookimpl
-          @hookimpl
-          def startup(datasette):
-              db = datasette.add_memory_database("counters")
-              async def inner():
-                  await db.execute_write("create table if not exists counters (name text primary key, value integer)")
-                  await db.execute_write("insert or ignore into counters (name, value) values ('counter_a', 0)")
-                  await db.execute_write("insert or ignore into counters (name, value) values ('counter_b', 0)")
-                  await db.execute_write("insert or ignore into counters (name, value) values ('counter_c', 0)")
-              return inner
-          @hookimpl
-          def canned_queries(database):
-              if database == "counters":
-                  queries = {}
-                  for name in ("counter_a", "counter_b", "counter_c"):
-                      queries["increment_{}".format(name)] = {
-                          "sql": "update counters set value = value + 1 where name = '{}'".format(name),
-                          "on_success_message_sql": "select 'Counter {name} incremented to ' || value from counters where name = '{name}'".format(name=name),
-                          "write": True,
-                      }
-                      queries["decrement_{}".format(name)] = {
-                          "sql": "update counters set value = value - 1 where name = '{}'".format(name),
-                          "on_success_message_sql": "select 'Counter {name} decremented to ' || value from counters where name = '{name}'".format(name=name),
-                          "write": True,
-                      }
-                  return queries
-          EOF
-      # - name: Make some modifications to metadata.json
-      #   run: |
-      #     cat fixtures.json | \
-      #       jq '.databases |= . + {"ephemeral": {"allow": {"id": "*"}}}' | \
-      #       jq '.plugins |= . + {"datasette-ephemeral-tables": {"table_ttl": 900}}' \
-      #       > metadata.json
-      #     cat metadata.json
-      - id: auth
-        name: Authenticate to Google Cloud
-        uses: google-github-actions/auth@v3
-        with:
-          credentials_json: ${{ secrets.GCP_SA_KEY }}
-      - name: Set up Cloud SDK
-        uses: google-github-actions/setup-gcloud@v3
-      - name: Deploy to Cloud Run
-        env:
-          LATEST_DATASETTE_SECRET: ${{ secrets.LATEST_DATASETTE_SECRET }}
-        run: |-
-          gcloud config set run/region us-central1
-          gcloud config set project datasette-222320
-          export SUFFIX="-${GITHUB_REF#refs/heads/}"
-          export SUFFIX=${SUFFIX#-main}
-          # Replace 1.0 with one-dot-zero in SUFFIX
-          export SUFFIX=${SUFFIX//1.0/one-dot-zero}
-          datasette publish cloudrun fixtures.db fixtures2.db extra_database.db \
-            -m fixtures-metadata.json \
-            --plugins-dir=plugins \
-            --branch=$GITHUB_SHA \
-            --version-note=$GITHUB_SHA \
-            --extra-options="--setting template_debug 1 --setting trace_debug 1 --crossdb" \
-            --install 'datasette-ephemeral-tables>=0.2.2' \
-            --service "datasette-latest$SUFFIX" \
-            --secret $LATEST_DATASETTE_SECRET
-      - name: Deploy to docs as well (only for main)
-        if: ${{ github.ref == 'refs/heads/main' }}
-        run: |-
-          # Deploy docs.db to a different service
-          datasette publish cloudrun docs.db \
-            --branch=$GITHUB_SHA \
-            --version-note=$GITHUB_SHA \
-            --extra-options="--setting template_debug 1" \
-            --service=datasette-docs-latest
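The heredoc in the deleted workflow above doubles as a compact example of two Datasette plugin hooks. Extracted from the YAML into a standalone plugin module, the same pattern looks like this (a sketch based on the workflow's counters.py, trimmed to a single counter; drop it in a --plugins-dir directory to try it):

from datasette import hookimpl


@hookimpl
def startup(datasette):
    # Create an in-memory database and seed it when the server starts
    db = datasette.add_memory_database("counters")

    async def inner():
        await db.execute_write(
            "create table if not exists counters (name text primary key, value integer)"
        )
        await db.execute_write(
            "insert or ignore into counters (name, value) values ('counter_a', 0)"
        )

    return inner


@hookimpl
def canned_queries(database):
    # Expose a writable canned query only on the "counters" database
    if database == "counters":
        return {
            "increment_counter_a": {
                "sql": "update counters set value = value + 1 where name = 'counter_a'",
                "write": True,
            }
        }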
16 .github/workflows/documentation-links.yml vendored
@@ -1,16 +0,0 @@
-name: Read the Docs Pull Request Preview
-on:
-  pull_request_target:
-    types:
-      - opened
-
-permissions:
-  pull-requests: write
-
-jobs:
-  documentation-links:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: readthedocs/actions/preview@v1
-        with:
-          project-slug: "datasette"
25 .github/workflows/prettier.yml vendored
@@ -1,25 +0,0 @@
-name: Check JavaScript for conformance with Prettier
-
-on: [push]
-
-permissions:
-  contents: read
-
-jobs:
-  prettier:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Check out repo
-        uses: actions/checkout@v4
-      - uses: actions/cache@v4
-        name: Configure npm caching
-        with:
-          path: ~/.npm
-          key: ${{ runner.OS }}-npm-${{ hashFiles('**/package-lock.json') }}
-          restore-keys: |
-            ${{ runner.OS }}-npm-
-      - name: Install dependencies
-        run: npm ci
-      - name: Run prettier
-        run: |-
-          npm run prettier -- --check
109 .github/workflows/publish.yml vendored
@@ -1,109 +0,0 @@
-name: Publish Python Package
-
-on:
-  release:
-    types: [created]
-
-permissions:
-  contents: read
-
-jobs:
-  test:
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"]
-    steps:
-      - uses: actions/checkout@v4
-      - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v6
-        with:
-          python-version: ${{ matrix.python-version }}
-          cache: pip
-          cache-dependency-path: pyproject.toml
-      - name: Install dependencies
-        run: |
-          pip install -e '.[test]'
-      - name: Run tests
-        run: |
-          pytest
-
-  deploy:
-    runs-on: ubuntu-latest
-    needs: [test]
-    environment: release
-    permissions:
-      id-token: write
-    steps:
-      - uses: actions/checkout@v4
-      - name: Set up Python
-        uses: actions/setup-python@v6
-        with:
-          python-version: '3.13'
-          cache: pip
-          cache-dependency-path: pyproject.toml
-      - name: Install dependencies
-        run: |
-          pip install setuptools wheel build
-      - name: Build
-        run: |
-          python -m build
-      - name: Publish
-        uses: pypa/gh-action-pypi-publish@release/v1
-
-  deploy_static_docs:
-    runs-on: ubuntu-latest
-    needs: [deploy]
-    if: "!github.event.release.prerelease"
-    steps:
-      - uses: actions/checkout@v4
-      - name: Set up Python
-        uses: actions/setup-python@v6
-        with:
-          python-version: '3.10'
-          cache: pip
-          cache-dependency-path: pyproject.toml
-      - name: Install dependencies
-        run: |
-          python -m pip install -e .[docs]
-          python -m pip install sphinx-to-sqlite==0.1a1
-      - name: Build docs.db
-        run: |-
-          cd docs
-          DISABLE_SPHINX_INLINE_TABS=1 sphinx-build -b xml . _build
-          sphinx-to-sqlite ../docs.db _build
-          cd ..
-      - id: auth
-        name: Authenticate to Google Cloud
-        uses: google-github-actions/auth@v2
-        with:
-          credentials_json: ${{ secrets.GCP_SA_KEY }}
-      - name: Set up Cloud SDK
-        uses: google-github-actions/setup-gcloud@v3
-      - name: Deploy stable-docs.datasette.io to Cloud Run
-        run: |-
-          gcloud config set run/region us-central1
-          gcloud config set project datasette-222320
-          datasette publish cloudrun docs.db \
-            --service=datasette-docs-stable
-
-  deploy_docker:
-    runs-on: ubuntu-latest
-    needs: [deploy]
-    if: "!github.event.release.prerelease"
-    steps:
-      - uses: actions/checkout@v4
-      - name: Build and push to Docker Hub
-        env:
-          DOCKER_USER: ${{ secrets.DOCKER_USER }}
-          DOCKER_PASS: ${{ secrets.DOCKER_PASS }}
-        run: |-
-          sleep 60  # Give PyPI time to make the new release available
-          docker login -u $DOCKER_USER -p $DOCKER_PASS
-          export REPO=datasetteproject/datasette
-          docker build -f Dockerfile \
-            -t $REPO:${GITHUB_REF#refs/tags/} \
-            --build-arg VERSION=${GITHUB_REF#refs/tags/} .
-          docker tag $REPO:${GITHUB_REF#refs/tags/} $REPO:latest
-          docker push $REPO:${GITHUB_REF#refs/tags/}
-          docker push $REPO:latest
28 .github/workflows/push_docker_tag.yml vendored
@@ -1,28 +0,0 @@
-name: Push specific Docker tag
-
-on:
-  workflow_dispatch:
-    inputs:
-      version_tag:
-        description: Tag to build and push
-
-permissions:
-  contents: read
-
-jobs:
-  deploy_docker:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - name: Build and push to Docker Hub
-        env:
-          DOCKER_USER: ${{ secrets.DOCKER_USER }}
-          DOCKER_PASS: ${{ secrets.DOCKER_PASS }}
-          VERSION_TAG: ${{ github.event.inputs.version_tag }}
-        run: |-
-          docker login -u $DOCKER_USER -p $DOCKER_PASS
-          export REPO=datasetteproject/datasette
-          docker build -f Dockerfile \
-            -t $REPO:${VERSION_TAG} \
-            --build-arg VERSION=${VERSION_TAG} .
-          docker push $REPO:${VERSION_TAG}
27 .github/workflows/spellcheck.yml vendored
@@ -1,27 +0,0 @@
-name: Check spelling in documentation
-
-on: [push, pull_request]
-
-permissions:
-  contents: read
-
-jobs:
-  spellcheck:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - name: Set up Python
-        uses: actions/setup-python@v6
-        with:
-          python-version: '3.11'
-          cache: 'pip'
-          cache-dependency-path: '**/pyproject.toml'
-      - name: Install dependencies
-        run: |
-          pip install -e '.[docs]'
-      - name: Check spelling
-        run: |
-          codespell README.md --ignore-words docs/codespell-ignore-words.txt
-          codespell docs/*.rst --ignore-words docs/codespell-ignore-words.txt
-          codespell datasette -S datasette/static --ignore-words docs/codespell-ignore-words.txt
-          codespell tests --ignore-words docs/codespell-ignore-words.txt
76 .github/workflows/stable-docs.yml vendored
@@ -1,76 +0,0 @@
-name: Update Stable Docs
-
-on:
-  release:
-    types: [published]
-  push:
-    branches:
-      - main
-
-permissions:
-  contents: write
-
-jobs:
-  update_stable_docs:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v5
-        with:
-          fetch-depth: 0  # We need all commits to find docs/ changes
-      - name: Set up Git user
-        run: |
-          git config user.name "Automated"
-          git config user.email "actions@users.noreply.github.com"
-      - name: Create stable branch if it does not yet exist
-        run: |
-          if ! git ls-remote --heads origin stable | grep -qE '\bstable\b'; then
-            # Make sure we have all tags locally
-            git fetch --tags --quiet
-
-            # Latest tag that is just numbers and dots (optionally prefixed with 'v')
-            # e.g., 0.65.2 or v0.65.2 — excludes 1.0a20, 1.0-rc1, etc.
-            LATEST_RELEASE=$(
-              git tag -l --sort=-v:refname \
-                | grep -E '^v?[0-9]+(\.[0-9]+){1,3}$' \
-                | head -n1
-            )
-
-            git checkout -b stable
-
-            # If there are any stable releases, copy docs/ from the most recent
-            if [ -n "$LATEST_RELEASE" ]; then
-              rm -rf docs/
-              git checkout "$LATEST_RELEASE" -- docs/ || true
-            fi
-
-            git commit -m "Populate docs/ from $LATEST_RELEASE" || echo "No changes"
-            git push -u origin stable
-          fi
-      - name: Handle Release
-        if: github.event_name == 'release' && !github.event.release.prerelease
-        run: |
-          git fetch --all
-          git checkout stable
-          git reset --hard ${GITHUB_REF#refs/tags/}
-          git push origin stable --force
-      - name: Handle Commit to Main
-        if: contains(github.event.head_commit.message, '!stable-docs')
-        run: |
-          git fetch origin
-          git checkout -b stable origin/stable
-          # Get the list of modified files in docs/ from the current commit
-          FILES=$(git diff-tree --no-commit-id --name-only -r ${{ github.sha }} -- docs/)
-          # Check if the list of files is non-empty
-          if [[ -n "$FILES" ]]; then
-            # Checkout those files to the stable branch to over-write with their contents
-            for FILE in $FILES; do
-              git checkout ${{ github.sha }} -- $FILE
-            done
-            git add docs/
-            git commit -m "Doc changes from ${{ github.sha }}"
-            git push origin stable
-          else
-            echo "No changes to docs/ in this commit."
-            exit 0
-          fi
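The grep -E filter in the deleted workflow above is what keeps pre-release tags off the stable branch. The same check in Python, for reference (a sketch; the sample tags mirror the ones named in the workflow's comments):

import re

# Mirrors grep -E '^v?[0-9]+(\.[0-9]+){1,3}$': numeric dotted tags,
# optionally prefixed with "v", with two to four components
STABLE_TAG = re.compile(r"^v?[0-9]+(\.[0-9]+){1,3}$")

for tag in ("0.65.2", "v0.65.2", "1.0a20", "1.0-rc1"):
    print(tag, bool(STABLE_TAG.match(tag)))
# 0.65.2 and v0.65.2 match; the pre-releases 1.0a20 and 1.0-rc1 do not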
40 .github/workflows/test-coverage.yml vendored
@@ -1,40 +0,0 @@
-name: Calculate test coverage
-
-on:
-  push:
-    branches:
-      - main
-  pull_request:
-    branches:
-      - main
-permissions:
-  contents: read
-
-jobs:
-  test:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Check out datasette
-        uses: actions/checkout@v4
-      - name: Set up Python
-        uses: actions/setup-python@v6
-        with:
-          python-version: '3.12'
-          cache: 'pip'
-          cache-dependency-path: '**/pyproject.toml'
-      - name: Install Python dependencies
-        run: |
-          python -m pip install --upgrade pip
-          python -m pip install -e .[test]
-          python -m pip install pytest-cov
-      - name: Run tests
-        run: |-
-          ls -lah
-          cat .coveragerc
-          pytest -m "not serial" --cov=datasette --cov-config=.coveragerc --cov-report xml:coverage.xml --cov-report term -x
-          ls -lah
-      - name: Upload coverage report
-        uses: codecov/codecov-action@v1
-        with:
-          token: ${{ secrets.CODECOV_TOKEN }}
-          file: coverage.xml
33 .github/workflows/test-pyodide.yml vendored
@@ -1,33 +0,0 @@
-name: Test in Pyodide with shot-scraper
-
-on:
-  push:
-  pull_request:
-  workflow_dispatch:
-
-permissions:
-  contents: read
-
-jobs:
-  test:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - name: Set up Python 3.10
-        uses: actions/setup-python@v6
-        with:
-          python-version: "3.10"
-          cache: 'pip'
-          cache-dependency-path: '**/pyproject.toml'
-      - name: Cache Playwright browsers
-        uses: actions/cache@v4
-        with:
-          path: ~/.cache/ms-playwright/
-          key: ${{ runner.os }}-browsers
-      - name: Install Playwright dependencies
-        run: |
-          pip install shot-scraper build
-          shot-scraper install
-      - name: Run test
-        run: |
-          ./test-in-pyodide-with-shot-scraper.sh
53 .github/workflows/test-sqlite-support.yml vendored
@@ -1,53 +0,0 @@
-name: Test SQLite versions
-
-on: [push, pull_request]
-
-permissions:
-  contents: read
-
-jobs:
-  test:
-    runs-on: ${{ matrix.platform }}
-    continue-on-error: true
-    strategy:
-      matrix:
-        platform: [ubuntu-latest]
-        python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"]
-        sqlite-version: [
-          #"3", # latest version
-          "3.46",
-          #"3.45",
-          #"3.27",
-          #"3.26",
-          "3.25",
-          #"3.25.3", # 2018-09-25, window functions breaks test_upsert for some reason on 3.10, skip for now
-          #"3.24", # 2018-06-04, added UPSERT support
-          #"3.23.1" # 2018-04-10, before UPSERT
-        ]
-    steps:
-      - uses: actions/checkout@v4
-      - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v6
-        with:
-          python-version: ${{ matrix.python-version }}
-          allow-prereleases: true
-          cache: pip
-          cache-dependency-path: pyproject.toml
-      - name: Set up SQLite ${{ matrix.sqlite-version }}
-        uses: asg017/sqlite-versions@71ea0de37ae739c33e447af91ba71dda8fcf22e6
-        with:
-          version: ${{ matrix.sqlite-version }}
-          cflags: "-DSQLITE_ENABLE_DESERIALIZE -DSQLITE_ENABLE_FTS5 -DSQLITE_ENABLE_FTS4 -DSQLITE_ENABLE_FTS3_PARENTHESIS -DSQLITE_ENABLE_RTREE -DSQLITE_ENABLE_JSON1"
-      - run: python3 -c "import sqlite3; print(sqlite3.sqlite_version)"
-      - run: echo $LD_LIBRARY_PATH
-      - name: Build extension for --load-extension test
-        run: |-
-          (cd tests && gcc ext.c -fPIC -shared -o ext.so)
-      - name: Install dependencies
-        run: |
-          pip install -e '.[test]'
-          pip freeze
-      - name: Run tests
-        run: |
-          pytest -n auto -m "not serial"
-          pytest -m "serial"
51 .github/workflows/test.yml vendored
@@ -1,51 +0,0 @@
-name: Test
-
-on: [push, pull_request]
-
-permissions:
-  contents: read
-
-jobs:
-  test:
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"]
-    steps:
-      - uses: actions/checkout@v4
-      - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v6
-        with:
-          python-version: ${{ matrix.python-version }}
-          allow-prereleases: true
-          cache: pip
-          cache-dependency-path: pyproject.toml
-      - name: Build extension for --load-extension test
-        run: |-
-          (cd tests && gcc ext.c -fPIC -shared -o ext.so)
-      - name: Install dependencies
-        run: |
-          pip install -e '.[test]'
-          pip freeze
-      - name: Run tests
-        run: |
-          pytest -n auto -m "not serial"
-          pytest -m "serial"
-          # And the test that exercises a localhost HTTPS server
-          tests/test_datasette_https_server.sh
-      - name: Install docs dependencies
-        run: |
-          pip install -e '.[docs]'
-      - name: Black
-        run: black --check .
-      - name: Check if cog needs to be run
-        run: |
-          cog --check docs/*.rst
-      - name: Check if blacken-docs needs to be run
-        run: |
-          # This fails on syntax errors, or if a diff was applied
-          blacken-docs -l 60 docs/*.rst
-      - name: Test DATASETTE_LOAD_PLUGINS
-        run: |
-          pip install datasette-init datasette-json-html
-          tests/test-datasette-load-plugins.sh
15 .github/workflows/tmate-mac.yml vendored
@@ -1,15 +0,0 @@
-name: tmate session mac
-
-on:
-  workflow_dispatch:
-
-permissions:
-  contents: read
-
-jobs:
-  build:
-    runs-on: macos-latest
-    steps:
-      - uses: actions/checkout@v2
-      - name: Setup tmate session
-        uses: mxschmitt/action-tmate@v3
18 .github/workflows/tmate.yml vendored
@@ -1,18 +0,0 @@
-name: tmate session
-
-on:
-  workflow_dispatch:
-
-permissions:
-  contents: read
-  models: read
-
-jobs:
-  build:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - name: Setup tmate session
-        uses: mxschmitt/action-tmate@v3
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
11 .gitignore vendored
@@ -5,9 +5,6 @@ scratchpad
 
 .vscode
 
-uv.lock
-data.db
-
 # We don't use Pipfile, so ignore them
 Pipfile
 Pipfile.lock
@@ -119,11 +116,3 @@ ENV/
 
 # macOS files
 .DS_Store
-node_modules
-.*.swp
-
-# In case someone compiled tests/ext.c for test_load_extensions, don't
-# include it in source control.
-tests/*.dylib
-tests/*.so
-tests/*.dll
4 .prettierrc
@@ -1,4 +0,0 @@
-{
-  "tabWidth": 2,
-  "useTabs": false
-}
16 .readthedocs.yaml
@@ -1,16 +0,0 @@
-version: 2
-
-build:
-  os: ubuntu-20.04
-  tools:
-    python: "3.11"
-
-sphinx:
-  configuration: docs/conf.py
-
-python:
-  install:
-    - method: pip
-      path: .
-      extra_requirements:
-        - docs
52 .travis.yml Normal file
@@ -0,0 +1,52 @@
+language: python
+dist: xenial
+
+# 3.6 is listed first so it gets used for the later build stages
+python:
+  - "3.6"
+  - "3.7"
+  - "3.5"
+
+# Executed for 3.5 AND 3.5 as the first "test" stage:
+script:
+  - pip install -U pip wheel
+  - pip install .[test]
+  - pytest
+
+cache:
+  directories:
+    - $HOME/.cache/pip
+
+# This defines further stages that execute after the tests
+jobs:
+  include:
+    - stage: deploy latest.datasette.io
+      if: branch = master AND type = push
+      script:
+        - pip install .[test]
+        - npm install -g now
+        - python tests/fixtures.py fixtures.db fixtures.json
+        - export ALIAS=`echo $TRAVIS_COMMIT | cut -c 1-7`
+        - datasette publish nowv1 fixtures.db -m fixtures.json --token=$NOW_TOKEN --branch=$TRAVIS_COMMIT --version-note=$TRAVIS_COMMIT --name=datasette-latest-$ALIAS --alias=latest.datasette.io --alias=$ALIAS.datasette.io
+    - stage: release tagged version
+      if: tag IS present
+      python: 3.6
+      script:
+        - npm install -g now
+        - export ALIAS=`echo $TRAVIS_COMMIT | cut -c 1-7`
+        - export TAG=`echo $TRAVIS_TAG | sed 's/\./-/g' | sed 's/.*/v&/'`
+        - now alias $ALIAS.datasette.io $TAG.datasette.io --token=$NOW_TOKEN
+        # Build and release to Docker Hub
+        - docker login -u $DOCKER_USER -p $DOCKER_PASS
+        - export REPO=datasetteproject/datasette
+        - docker build -f Dockerfile -t $REPO:$TRAVIS_TAG .
+        - docker tag $REPO:$TRAVIS_TAG $REPO:latest
+        - docker push $REPO
+      deploy:
+        - provider: pypi
+          user: simonw
+          distributions: bdist_wheel
+          password: ${PYPI_PASSWORD}
+          on:
+            branch: master
+            tags: true
128 CODE_OF_CONDUCT.md
@@ -1,128 +0,0 @@
-# Contributor Covenant Code of Conduct
-
-## Our Pledge
-
-We as members, contributors, and leaders pledge to make participation in our
-community a harassment-free experience for everyone, regardless of age, body
-size, visible or invisible disability, ethnicity, sex characteristics, gender
-identity and expression, level of experience, education, socio-economic status,
-nationality, personal appearance, race, religion, or sexual identity
-and orientation.
-
-We pledge to act and interact in ways that contribute to an open, welcoming,
-diverse, inclusive, and healthy community.
-
-## Our Standards
-
-Examples of behavior that contributes to a positive environment for our
-community include:
-
-* Demonstrating empathy and kindness toward other people
-* Being respectful of differing opinions, viewpoints, and experiences
-* Giving and gracefully accepting constructive feedback
-* Accepting responsibility and apologizing to those affected by our mistakes,
-  and learning from the experience
-* Focusing on what is best not just for us as individuals, but for the
-  overall community
-
-Examples of unacceptable behavior include:
-
-* The use of sexualized language or imagery, and sexual attention or
-  advances of any kind
-* Trolling, insulting or derogatory comments, and personal or political attacks
-* Public or private harassment
-* Publishing others' private information, such as a physical or email
-  address, without their explicit permission
-* Other conduct which could reasonably be considered inappropriate in a
-  professional setting
-
-## Enforcement Responsibilities
-
-Community leaders are responsible for clarifying and enforcing our standards of
-acceptable behavior and will take appropriate and fair corrective action in
-response to any behavior that they deem inappropriate, threatening, offensive,
-or harmful.
-
-Community leaders have the right and responsibility to remove, edit, or reject
-comments, commits, code, wiki edits, issues, and other contributions that are
-not aligned to this Code of Conduct, and will communicate reasons for moderation
-decisions when appropriate.
-
-## Scope
-
-This Code of Conduct applies within all community spaces, and also applies when
-an individual is officially representing the community in public spaces.
-Examples of representing our community include using an official e-mail address,
-posting via an official social media account, or acting as an appointed
-representative at an online or offline event.
-
-## Enforcement
-
-Instances of abusive, harassing, or otherwise unacceptable behavior may be
-reported to the community leaders responsible for enforcement at
-`swillison+datasette-code-of-conduct@gmail.com`.
-All complaints will be reviewed and investigated promptly and fairly.
-
-All community leaders are obligated to respect the privacy and security of the
-reporter of any incident.
-
-## Enforcement Guidelines
-
-Community leaders will follow these Community Impact Guidelines in determining
-the consequences for any action they deem in violation of this Code of Conduct:
-
-### 1. Correction
-
-**Community Impact**: Use of inappropriate language or other behavior deemed
-unprofessional or unwelcome in the community.
-
-**Consequence**: A private, written warning from community leaders, providing
-clarity around the nature of the violation and an explanation of why the
-behavior was inappropriate. A public apology may be requested.
-
-### 2. Warning
-
-**Community Impact**: A violation through a single incident or series
-of actions.
-
-**Consequence**: A warning with consequences for continued behavior. No
-interaction with the people involved, including unsolicited interaction with
-those enforcing the Code of Conduct, for a specified period of time. This
-includes avoiding interactions in community spaces as well as external channels
-like social media. Violating these terms may lead to a temporary or
-permanent ban.
-
-### 3. Temporary Ban
-
-**Community Impact**: A serious violation of community standards, including
-sustained inappropriate behavior.
-
-**Consequence**: A temporary ban from any sort of interaction or public
-communication with the community for a specified period of time. No public or
-private interaction with the people involved, including unsolicited interaction
-with those enforcing the Code of Conduct, is allowed during this period.
-Violating these terms may lead to a permanent ban.
-
-### 4. Permanent Ban
-
-**Community Impact**: Demonstrating a pattern of violation of community
-standards, including sustained inappropriate behavior, harassment of an
-individual, or aggression toward or disparagement of classes of individuals.
-
-**Consequence**: A permanent ban from any sort of public interaction within
-the community.
-
-## Attribution
-
-This Code of Conduct is adapted from the [Contributor Covenant][homepage],
-version 2.0, available at
-https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
-
-Community Impact Guidelines were inspired by [Mozilla's code of conduct
-enforcement ladder](https://github.com/mozilla/diversity).
-
-[homepage]: https://www.contributor-covenant.org
-
-For answers to common questions about this code of conduct, see the FAQ at
-https://www.contributor-covenant.org/faq. Translations are available at
-https://www.contributor-covenant.org/translations.
48 Dockerfile
@@ -1,18 +1,42 @@
-FROM python:3.11.0-slim-bullseye as build
+FROM python:3.7.2-slim-stretch as build
 
-# Version of Datasette to install, e.g. 0.55
-# docker build . -t datasette --build-arg VERSION=0.55
-ARG VERSION
-
-RUN apt-get update && \
-    apt-get install -y --no-install-recommends libsqlite3-mod-spatialite && \
-    apt clean && \
-    rm -rf /var/lib/apt && \
-    rm -rf /var/lib/dpkg/info/*
-
-RUN pip install https://github.com/simonw/datasette/archive/refs/tags/${VERSION}.zip && \
-    find /usr/local/lib -name '__pycache__' | xargs rm -r && \
-    rm -rf /root/.cache/pip
+# Setup build dependencies
+RUN apt update \
+    && apt install -y python3-dev build-essential wget libxml2-dev libproj-dev libgeos-dev libsqlite3-dev zlib1g-dev pkg-config git \
+    && apt clean
+
+RUN wget "https://www.sqlite.org/2018/sqlite-autoconf-3260000.tar.gz" && tar xzf sqlite-autoconf-3260000.tar.gz \
+    && cd sqlite-autoconf-3260000 && ./configure --disable-static --enable-fts5 --enable-json1 CFLAGS="-g -O2 -DSQLITE_ENABLE_FTS3=1 -DSQLITE_ENABLE_FTS4=1 -DSQLITE_ENABLE_RTREE=1 -DSQLITE_ENABLE_JSON1" \
+    && make && make install
+
+RUN wget "https://www.gaia-gis.it/gaia-sins/freexl-1.0.5.tar.gz" && tar zxf freexl-1.0.5.tar.gz \
+    && cd freexl-1.0.5 && ./configure && make && make install
+
+RUN wget "https://www.gaia-gis.it/gaia-sins/libspatialite-4.4.0-RC0.tar.gz" && tar zxf libspatialite-4.4.0-RC0.tar.gz \
+    && cd libspatialite-4.4.0-RC0 && ./configure && make && make install
+
+RUN wget "https://www.gaia-gis.it/gaia-sins/readosm-1.1.0.tar.gz" && tar zxf readosm-1.1.0.tar.gz && cd readosm-1.1.0 && ./configure && make && make install
+
+RUN wget "https://www.gaia-gis.it/gaia-sins/spatialite-tools-4.4.0-RC0.tar.gz" && tar zxf spatialite-tools-4.4.0-RC0.tar.gz \
+    && cd spatialite-tools-4.4.0-RC0 && ./configure && make && make install
+
+
+# Add local code to the image instead of fetching from pypi.
+COPY . /datasette
+
+RUN pip install /datasette
+
+FROM python:3.7.2-slim-stretch
+
+# Copy python dependencies and spatialite libraries
+COPY --from=build /usr/local/lib/ /usr/local/lib/
+# Copy executables
+COPY --from=build /usr/local/bin /usr/local/bin
+# Copy spatial extensions
+COPY --from=build /usr/lib/x86_64-linux-gnu /usr/lib/x86_64-linux-gnu
+
+ENV LD_LIBRARY_PATH=/usr/local/lib
 
 EXPOSE 8001
 CMD ["datasette"]
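The reverted build stage compiles SQLite 3.26.0 from source with full-text search and JSON support enabled. A quick way to confirm what the resulting image actually links against (a sketch intended to run inside the container; the expected values are inferred from the compile flags above, not from observed output):

import sqlite3

# Version of the SQLite library the sqlite3 module is linked against
print(sqlite3.sqlite_version)  # expected: 3.26.0 in the rebuilt image

conn = sqlite3.connect(":memory:")
# Given the CFLAGS in the build stage, compile options should include
# ENABLE_FTS3, ENABLE_FTS4, ENABLE_FTS5, ENABLE_RTREE and ENABLE_JSON1
for (option,) in conn.execute("pragma compile_options"):
    print(option)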
56 Justfile
@@ -1,56 +0,0 @@
-export DATASETTE_SECRET := "not_a_secret"
-
-# Run tests and linters
-@default: test lint
-
-# Setup project
-@init:
-    uv sync --extra test --extra docs
-
-# Run pytest with supplied options
-@test *options: init
-    uv run pytest -n auto {{options}}
-
-@codespell:
-    uv run codespell README.md --ignore-words docs/codespell-ignore-words.txt
-    uv run codespell docs/*.rst --ignore-words docs/codespell-ignore-words.txt
-    uv run codespell datasette -S datasette/static --ignore-words docs/codespell-ignore-words.txt
-    uv run codespell tests --ignore-words docs/codespell-ignore-words.txt
-
-# Run linters: black, flake8, mypy, cog
-@lint: codespell
-    uv run black . --check
-    uv run flake8
-    uv run --extra test cog --check README.md docs/*.rst
-
-# Rebuild docs with cog
-@cog:
-    uv run --extra test cog -r README.md docs/*.rst
-
-# Serve live docs on localhost:8000
-@docs: cog blacken-docs
-    uv run --extra docs make -C docs livehtml
-
-# Build docs as static HTML
-@docs-build: cog blacken-docs
-    rm -rf docs/_build && cd docs && uv run make html
-
-# Apply Black
-@black:
-    uv run black .
-
-# Apply blacken-docs
-@blacken-docs:
-    uv run blacken-docs -l 60 docs/*.rst
-
-# Apply prettier
-@prettier:
-    npm run fix
-
-# Format code with both black and prettier
-@format: black prettier blacken-docs
-
-@serve *options:
-    uv run sqlite-utils create-database data.db
-    uv run sqlite-utils create-table data.db docs id integer title text --pk id --ignore
-    uv run python -m datasette data.db --root --reload {{options}}
2 MANIFEST.in
@@ -1,5 +1,3 @@
 recursive-include datasette/static *
-recursive-include datasette/templates *
 include versioneer.py
 include datasette/_version.py
-include LICENSE
118
README.md
118
README.md
|
|
@ -1,42 +1,69 @@
|
||||||
<img src="https://datasette.io/static/datasette-logo.svg" alt="Datasette">
|
# Datasette
|
||||||
|
|
||||||
[](https://pypi.org/project/datasette/)
|
[](https://pypi.org/project/datasette/)
|
||||||
[](https://docs.datasette.io/en/latest/changelog.html)
|
[](https://travis-ci.org/simonw/datasette)
|
||||||
[](https://pypi.org/project/datasette/)
|
[](http://datasette.readthedocs.io/en/latest/?badge=latest)
|
||||||
[](https://github.com/simonw/datasette/actions?query=workflow%3ATest)
|
[](https://github.com/simonw/datasette/blob/master/LICENSE)
|
||||||
[](https://docs.datasette.io/en/latest/?badge=latest)
|
[](https://black.readthedocs.io/en/stable/)
|
||||||
[](https://github.com/simonw/datasette/blob/main/LICENSE)
|
|
||||||
[](https://hub.docker.com/r/datasetteproject/datasette)
|
|
||||||
[](https://datasette.io/discord)
|
|
||||||
|
|
||||||
*An open source multi-tool for exploring and publishing data*
|
*A tool for exploring and publishing data*
|
||||||
|
|
||||||
Datasette is a tool for exploring and publishing data. It helps people take data of any shape or size and publish that as an interactive, explorable website and accompanying API.
|
Datasette is a tool for exploring and publishing data. It helps people take data of any shape or size and publish that as an interactive, explorable website and accompanying API.
|
||||||
|
|
||||||
Datasette is aimed at data journalists, museum curators, archivists, local governments, scientists, researchers and anyone else who has data that they wish to share with the world.
|
Datasette is aimed at data journalists, museum curators, archivists, local governments and anyone else who has data that they wish to share with the world.
|
||||||
|
|
||||||
[Explore a demo](https://datasette.io/global-power-plants/global-power-plants), watch [a video about the project](https://simonwillison.net/2021/Feb/7/video/) or try it out [on GitHub Codespaces](https://github.com/datasette/datasette-studio).
|
[Explore a demo](https://fivethirtyeight.datasettes.com/fivethirtyeight), watch [a video about the project](https://www.youtube.com/watch?v=pTr1uLQTJNE) or try it out by [uploading and publishing your own CSV data](https://simonwillison.net/2019/Apr/23/datasette-glitch/).
|
||||||
|
|
||||||
* [datasette.io](https://datasette.io/) is the official project website
|
* Comprehensive documentation: http://datasette.readthedocs.io/
|
||||||
* Latest [Datasette News](https://datasette.io/news)
|
* Examples: https://github.com/simonw/datasette/wiki/Datasettes
|
||||||
* Comprehensive documentation: https://docs.datasette.io/
|
* Live demo of current master: https://latest.datasette.io/
|
||||||
* Examples: https://datasette.io/examples
|
|
||||||
* Live demo of current `main` branch: https://latest.datasette.io/
|
|
||||||
* Questions, feedback or want to talk about the project? Join our [Discord](https://datasette.io/discord)
|
|
||||||
|
|
||||||
Want to stay up-to-date with the project? Subscribe to the [Datasette newsletter](https://datasette.substack.com/) for tips, tricks and news on what's new in the Datasette ecosystem.
|
## News
|
||||||
|
|
||||||
|
* 23rd June 2019: [Porting Datasette to ASGI, and Turtles all the way down](https://simonwillison.net/2019/Jun/23/datasette-asgi/)
|
||||||
|
* 21st May 2019: The anonymized raw data from [the Stack Overflow Developer Survey 2019](https://stackoverflow.blog/2019/05/21/public-data-release-of-stack-overflows-2019-developer-survey/) has been [published in partnership with Glitch](https://glitch.com/culture/discover-insights-explore-developer-survey-results-2019/), powered by Datasette.
|
||||||
|
* 19th May 2019: [Datasette 0.28](https://datasette.readthedocs.io/en/stable/changelog.html#v0-28) - a salmagundi of new features!
|
||||||
|
* No longer immutable! Datasette now supports [databases that change](https://datasette.readthedocs.io/en/stable/changelog.html#supporting-databases-that-change).
|
||||||
|
* [Faceting improvements](https://datasette.readthedocs.io/en/stable/changelog.html#faceting-improvements-and-faceting-plugins) including facet-by-JSON-array and the ability to define custom faceting using plugins.
|
||||||
|
* [datasette publish cloudrun](https://datasette.readthedocs.io/en/stable/changelog.html#datasette-publish-cloudrun) lets you publish databases to Google's new Cloud Run hosting service.
|
||||||
|
* New [register_output_renderer](https://datasette.readthedocs.io/en/stable/changelog.html#register-output-renderer-plugins) plugin hook for adding custom output extensions to Datasette in addition to the default `.json` and `.csv`.
|
||||||
|
* Dozens of other smaller features and tweaks - see [the release notes](https://datasette.readthedocs.io/en/stable/changelog.html#v0-28) for full details.
|
||||||
|
* Read more about this release here: [Datasette 0.28—and why master should always be releasable](https://simonwillison.net/2019/May/19/datasette-0-28/)
|
||||||
|
* 24th February 2019: [
|
||||||
|
sqlite-utils: a Python library and CLI tool for building SQLite databases](https://simonwillison.net/2019/Feb/25/sqlite-utils/) - a partner tool for easily creating SQLite databases for use with Datasette.
|
||||||
|
* 31st Janary 2019: [Datasette 0.27](https://datasette.readthedocs.io/en/latest/changelog.html#v0-27) - `datasette plugins` command, newline-delimited JSON export option, new documentation on [The Datasette Ecosystem](https://datasette.readthedocs.io/en/latest/ecosystem.html).
|
||||||
|
* 10th January 2019: [Datasette 0.26.1](http://datasette.readthedocs.io/en/latest/changelog.html#v0-26-1) - SQLite upgrade in Docker image, `/-/versions` now shows SQLite compile options.
|
||||||
|
* 2nd January 2019: [Datasette 0.26](http://datasette.readthedocs.io/en/latest/changelog.html#v0-26) - minor bug fixes, `datasette publish now --alias` argument.
|
||||||
|
* 18th December 2018: [Fast Autocomplete Search for Your Website](https://24ways.org/2018/fast-autocomplete-search-for-your-website/) - a new tutorial on using Datasette to build a JavaScript autocomplete search engine.
|
||||||
|
* 3rd October 2018: [The interesting ideas in Datasette](https://simonwillison.net/2018/Oct/4/datasette-ideas/) - a write-up of some of the less obvious interesting ideas embedded in the Datasette project.
|
||||||
|
* 19th September 2018: [Datasette 0.25](http://datasette.readthedocs.io/en/latest/changelog.html#v0-25) - New plugin hooks, improved database view support and an easier way to use more recent versions of SQLite.
|
||||||
|
* 23rd July 2018: [Datasette 0.24](http://datasette.readthedocs.io/en/latest/changelog.html#v0-24) - a number of small new features
|
||||||
|
* 29th June 2018: [datasette-vega](https://github.com/simonw/datasette-vega), a new plugin for visualizing data as bar, line or scatter charts
|
||||||
|
* 21st June 2018: [Datasette 0.23.1](http://datasette.readthedocs.io/en/latest/changelog.html#v0-23-1) - minor bug fixes
|
||||||
|
* 18th June 2018: [Datasette 0.23: CSV, SpatiaLite and more](http://datasette.readthedocs.io/en/latest/changelog.html#v0-23) - CSV export, foreign key expansion in JSON and CSV, new config options, improved support for SpatiaLite and a bunch of other improvements
|
||||||
|
* 23rd May 2018: [Datasette 0.22.1 bugfix](https://github.com/simonw/datasette/releases/tag/0.22.1) plus we now use [versioneer](https://github.com/warner/python-versioneer)
|
||||||
|
* 20th May 2018: [Datasette 0.22: Datasette Facets](https://simonwillison.net/2018/May/20/datasette-facets)
|
||||||
|
* 5th May 2018: [Datasette 0.21: New _shape=, new _size=, search within columns](https://github.com/simonw/datasette/releases/tag/0.21)
|
||||||
|
* 25th April 2018: [Exploring the UK Register of Members Interests with SQL and Datasette](https://simonwillison.net/2018/Apr/25/register-members-interests/) - a tutorial describing how [register-of-members-interests.datasettes.com](https://register-of-members-interests.datasettes.com/) was built ([source code here](https://github.com/simonw/register-of-members-interests))
|
||||||
|
* 20th April 2018: [Datasette plugins, and building a clustered map visualization](https://simonwillison.net/2018/Apr/20/datasette-plugins/) - introducing Datasette's new plugin system and [datasette-cluster-map](https://pypi.org/project/datasette-cluster-map/), a plugin for visualizing data on a map
|
||||||
|
* 20th April 2018: [Datasette 0.20: static assets and templates for plugins](https://github.com/simonw/datasette/releases/tag/0.20)
|
||||||
|
* 16th April 2018: [Datasette 0.19: plugins preview](https://github.com/simonw/datasette/releases/tag/0.19)
|
||||||
|
* 14th April 2018: [Datasette 0.18: units](https://github.com/simonw/datasette/releases/tag/0.18)
|
||||||
|
* 9th April 2018: [Datasette 0.15: sort by column](https://github.com/simonw/datasette/releases/tag/0.15)
|
||||||
|
* 28th March 2018: [Baltimore Sun Public Salary Records](https://simonwillison.net/2018/Mar/28/datasette-in-the-wild/) - a data journalism project from the Baltimore Sun powered by Datasette - source code [is available here](https://github.com/baltimore-sun-data/salaries-datasette)
|
||||||
|
* 27th March 2018: [Cloud-first: Rapid webapp deployment using containers](https://wwwf.imperial.ac.uk/blog/research-software-engineering/2018/03/27/cloud-first-rapid-webapp-deployment-using-containers/) - a tutorial covering deploying Datasette using Microsoft Azure by the Research Software Engineering team at Imperial College London
|
||||||
|
* 28th January 2018: [Analyzing my Twitter followers with Datasette](https://simonwillison.net/2018/Jan/28/analyzing-my-twitter-followers/) - a tutorial on using Datasette to analyze follower data pulled from the Twitter API
|
||||||
|
* 17th January 2018: [Datasette Publish: a web app for publishing CSV files as an online database](https://simonwillison.net/2018/Jan/17/datasette-publish/)
|
||||||
|
* 12th December 2017: [Building a location to time zone API with SpatiaLite, OpenStreetMap and Datasette](https://simonwillison.net/2017/Dec/12/building-a-location-time-zone-api/)
|
||||||
|
* 9th December 2017: [Datasette 0.14: customization edition](https://github.com/simonw/datasette/releases/tag/0.14)
|
||||||
|
* 25th November 2017: [New in Datasette: filters, foreign keys and search](https://simonwillison.net/2017/Nov/25/new-in-datasette/)
|
||||||
|
* 13th November 2017: [Datasette: instantly create and publish an API for your SQLite databases](https://simonwillison.net/2017/Nov/13/datasette/)
|
||||||
|
|
||||||
## Installation
|
## Installation
|
||||||
|
|
||||||
If you are on a Mac, [Homebrew](https://brew.sh/) is the easiest way to install Datasette:
|
pip3 install datasette
|
||||||
|
|
||||||
brew install datasette
|
Datasette requires Python 3.5 or higher. We also have [detailed installation instructions](https://datasette.readthedocs.io/en/stable/installation.html) covering other options such as Docker.
|
||||||
|
|
||||||
You can also install it using `pip` or `pipx`:
|
|
||||||
|
|
||||||
pip install datasette
|
|
||||||
|
|
||||||
Datasette requires Python 3.8 or higher. We also have [detailed installation instructions](https://docs.datasette.io/en/stable/installation.html) covering other options such as Docker.
|
|
||||||
|
|
||||||
## Basic usage
|
## Basic usage
|
||||||
|
|
||||||
|
|
@ -48,12 +75,41 @@ This will start a web server on port 8001 - visit http://localhost:8001/ to acce
|
||||||
|
|
||||||
Use Chrome on OS X? You can run datasette against your browser history like so:
|
Use Chrome on OS X? You can run datasette against your browser history like so:
|
||||||
|
|
||||||
datasette ~/Library/Application\ Support/Google/Chrome/Default/History --nolock
|
datasette ~/Library/Application\ Support/Google/Chrome/Default/History
|
||||||
|
|
||||||
Now visiting http://localhost:8001/History/downloads will show you a web interface to browse your downloads data:
|
Now visiting http://localhost:8001/History/downloads will show you a web interface to browse your downloads data:
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
|
## datasette serve options
|
||||||
|
|
||||||
|
$ datasette serve --help
|
||||||
|
|
||||||
|
Usage: datasette serve [OPTIONS] [FILES]...
|
||||||
|
|
||||||
|
Serve up specified SQLite database files with a web UI
|
||||||
|
|
||||||
|
Options:
|
||||||
|
-i, --immutable PATH Database files to open in immutable mode
|
||||||
|
-h, --host TEXT host for server, defaults to 127.0.0.1
|
||||||
|
-p, --port INTEGER port for server, defaults to 8001
|
||||||
|
--debug Enable debug mode - useful for development
|
||||||
|
--reload Automatically reload if database or code change detected -
|
||||||
|
useful for development
|
||||||
|
--cors Enable CORS by serving Access-Control-Allow-Origin: *
|
||||||
|
--load-extension PATH Path to a SQLite extension to load
|
||||||
|
--inspect-file TEXT Path to JSON file created using "datasette inspect"
|
||||||
|
-m, --metadata FILENAME Path to JSON file containing license/source metadata
|
||||||
|
--template-dir DIRECTORY Path to directory containing custom templates
|
||||||
|
--plugins-dir DIRECTORY Path to directory containing custom plugins
|
||||||
|
--static STATIC MOUNT mountpoint:path-to-directory for serving static files
|
||||||
|
--memory Make :memory: database available
|
||||||
|
--config CONFIG Set config option using configname:value
|
||||||
|
datasette.readthedocs.io/en/latest/config.html
|
||||||
|
--version-note TEXT Additional note to show on /-/versions
|
||||||
|
--help-config Show available config options
|
||||||
|
--help Show this message and exit.
|
||||||
|
|
||||||
## metadata.json
|
## metadata.json
|
||||||
|
|
||||||
If you want to include licensing and source information in the generated datasette website you can do so using a JSON file that looks something like this:
|
If you want to include licensing and source information in the generated datasette website you can do so using a JSON file that looks something like this:
|
||||||
|
|
@ -74,7 +130,7 @@ The license and source information will be displayed on the index page and in th
|
||||||
|
|
||||||
## datasette publish
|
## datasette publish
|
||||||
|
|
||||||
If you have [Heroku](https://heroku.com/) or [Google Cloud Run](https://cloud.google.com/run/) configured, Datasette can deploy one or more SQLite databases to the internet with a single command:
|
If you have [Heroku](https://heroku.com/), [Google Cloud Run](https://cloud.google.com/run/) or [Zeit Now v1](https://zeit.co/now) configured, Datasette can deploy one or more SQLite databases to the internet with a single command:
|
||||||
|
|
||||||
datasette publish heroku database.db
|
datasette publish heroku database.db
|
||||||
|
|
||||||
|
|
@ -84,8 +140,4 @@ Or:
|
||||||
|
|
||||||
This will create a docker image containing both the datasette application and the specified SQLite database files. It will then deploy that image to Heroku or Cloud Run and give you a URL to access the resulting website and API.
|
This will create a docker image containing both the datasette application and the specified SQLite database files. It will then deploy that image to Heroku or Cloud Run and give you a URL to access the resulting website and API.
|
||||||
|
|
||||||
See [Publishing data](https://docs.datasette.io/en/stable/publish.html) in the documentation for more details.
|
See [Publishing data](https://datasette.readthedocs.io/en/stable/publish.html) in the documentation for more details.
|
||||||
|
|
||||||
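The same one-command pattern works for the other providers - for example Cloud Run, where the service name is illustrative:

    datasette publish cloudrun database.db --service my-database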
-## Datasette Lite
-
-[Datasette Lite](https://lite.datasette.io/) is Datasette packaged using WebAssembly so that it runs entirely in your browser, no Python web application server required. Read more about that in the [Datasette Lite documentation](https://github.com/simonw/datasette-lite/blob/main/README.md).
1 _config.yml (new file)

@@ -0,0 +1 @@
+theme: jekyll-theme-architect
@@ -1,8 +0,0 @@
-coverage:
-  status:
-    project:
-      default:
-        informational: true
-    patch:
-      default:
-        informational: true
@@ -1,8 +1,3 @@
-from datasette.permissions import Permission  # noqa
 from datasette.version import __version_info__, __version__  # noqa
-from datasette.events import Event  # noqa
-from datasette.utils.asgi import Forbidden, NotFound, Request, Response  # noqa
-from datasette.utils import actor_matches_allow  # noqa
-from datasette.views import Context  # noqa
 from .hookspecs import hookimpl  # noqa
 from .hookspecs import hookspec  # noqa
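`hookimpl` stays exported on both sides because it is the decorator plugins import; a minimal plugin sketch using Datasette's documented `prepare_connection` hook:

    from datasette import hookimpl


    @hookimpl
    def prepare_connection(conn):
        # Register a custom SQL function on each new SQLite connection
        conn.create_function("hello_world", 0, lambda: "Hello world!")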
@@ -1,4 +0,0 @@
-from datasette.cli import cli
-
-if __name__ == "__main__":
-    cli()
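This module is what makes `python -m datasette` behave like the `datasette` executable (the default subcommand is `serve`), so on the side that has it you can run, for example:

    python -m datasette mydata.db

where `mydata.db` is a placeholder filename.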
556 datasette/_version.py (new file)

@@ -0,0 +1,556 @@
+# This file helps to compute a version number in source trees obtained from
+# git-archive tarball (such as those provided by githubs download-from-tag
+# feature). Distribution tarballs (built by setup.py sdist) and build
+# directories (produced by setup.py build) will contain a much shorter file
+# that just contains the computed version number.
+
+# This file is released into the public domain. Generated by
+# versioneer-0.18 (https://github.com/warner/python-versioneer)
+
+"""Git implementation of _version.py."""
+
+import errno
+import os
+import re
+import subprocess
+import sys
+
+
+def get_keywords():
+    """Get the keywords needed to look up the version information."""
+    # these strings will be replaced by git during git-archive.
+    # setup.py/versioneer.py will grep for the variable names, so they must
+    # each be defined on a line of their own. _version.py will just call
+    # get_keywords().
+    git_refnames = "$Format:%d$"
+    git_full = "$Format:%H$"
+    git_date = "$Format:%ci$"
+    keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
+    return keywords
+
+
+class VersioneerConfig:
+    """Container for Versioneer configuration parameters."""
+
+
+def get_config():
+    """Create, populate and return the VersioneerConfig() object."""
+    # these strings are filled in when 'setup.py versioneer' creates
+    # _version.py
+    cfg = VersioneerConfig()
+    cfg.VCS = "git"
+    cfg.style = "pep440"
+    cfg.tag_prefix = ""
+    cfg.parentdir_prefix = "datasette-"
+    cfg.versionfile_source = "datasette/_version.py"
+    cfg.verbose = False
+    return cfg
+
+
+class NotThisMethod(Exception):
+    """Exception raised if a method is not valid for the current scenario."""
+
+
+LONG_VERSION_PY = {}
+HANDLERS = {}
+
+
+def register_vcs_handler(vcs, method):  # decorator
+    """Decorator to mark a method as the handler for a particular VCS."""
+
+    def decorate(f):
+        """Store f in HANDLERS[vcs][method]."""
+        if vcs not in HANDLERS:
+            HANDLERS[vcs] = {}
+        HANDLERS[vcs][method] = f
+        return f
+
+    return decorate
+
+
+def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None):
+    """Call the given command(s)."""
+    assert isinstance(commands, list)
+    p = None
+    for c in commands:
+        try:
+            dispcmd = str([c] + args)
+            # remember shell=False, so use git.cmd on windows, not just git
+            p = subprocess.Popen(
+                [c] + args,
+                cwd=cwd,
+                env=env,
+                stdout=subprocess.PIPE,
+                stderr=(subprocess.PIPE if hide_stderr else None),
+            )
+            break
+        except EnvironmentError:
+            e = sys.exc_info()[1]
+            if e.errno == errno.ENOENT:
+                continue
+            if verbose:
+                print("unable to run %s" % dispcmd)
+                print(e)
+            return None, None
+    else:
+        if verbose:
+            print("unable to find command, tried %s" % (commands,))
+        return None, None
+    stdout = p.communicate()[0].strip()
+    if sys.version_info[0] >= 3:
+        stdout = stdout.decode()
+    if p.returncode != 0:
+        if verbose:
+            print("unable to run %s (error)" % dispcmd)
+            print("stdout was %s" % stdout)
+        return None, p.returncode
+    return stdout, p.returncode
+
+
+def versions_from_parentdir(parentdir_prefix, root, verbose):
+    """Try to determine the version from the parent directory name.
+
+    Source tarballs conventionally unpack into a directory that includes both
+    the project name and a version string. We will also support searching up
+    two directory levels for an appropriately named parent directory
+    """
+    rootdirs = []
+
+    for i in range(3):
+        dirname = os.path.basename(root)
+        if dirname.startswith(parentdir_prefix):
+            return {
+                "version": dirname[len(parentdir_prefix) :],
+                "full-revisionid": None,
+                "dirty": False,
+                "error": None,
+                "date": None,
+            }
+        else:
+            rootdirs.append(root)
+            root = os.path.dirname(root)  # up a level
+
+    if verbose:
+        print(
+            "Tried directories %s but none started with prefix %s"
+            % (str(rootdirs), parentdir_prefix)
+        )
+    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
+
+
+@register_vcs_handler("git", "get_keywords")
+def git_get_keywords(versionfile_abs):
+    """Extract version information from the given file."""
+    # the code embedded in _version.py can just fetch the value of these
+    # keywords. When used from setup.py, we don't want to import _version.py,
+    # so we do it with a regexp instead. This function is not used from
+    # _version.py.
+    keywords = {}
+    try:
+        f = open(versionfile_abs, "r")
+        for line in f.readlines():
+            if line.strip().startswith("git_refnames ="):
+                mo = re.search(r'=\s*"(.*)"', line)
+                if mo:
+                    keywords["refnames"] = mo.group(1)
+            if line.strip().startswith("git_full ="):
+                mo = re.search(r'=\s*"(.*)"', line)
+                if mo:
+                    keywords["full"] = mo.group(1)
+            if line.strip().startswith("git_date ="):
+                mo = re.search(r'=\s*"(.*)"', line)
+                if mo:
+                    keywords["date"] = mo.group(1)
+        f.close()
+    except EnvironmentError:
+        pass
+    return keywords
+
+
+@register_vcs_handler("git", "keywords")
+def git_versions_from_keywords(keywords, tag_prefix, verbose):
+    """Get version information from git keywords."""
+    if not keywords:
+        raise NotThisMethod("no keywords at all, weird")
+    date = keywords.get("date")
+    if date is not None:
+        # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
+        # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
+        # -like" string, which we must then edit to make compliant), because
+        # it's been around since git-1.5.3, and it's too difficult to
+        # discover which version we're using, or to work around using an
+        # older one.
+        date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
+    refnames = keywords["refnames"].strip()
+    if refnames.startswith("$Format"):
+        if verbose:
+            print("keywords are unexpanded, not using")
+        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
+    refs = set([r.strip() for r in refnames.strip("()").split(",")])
+    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
+    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
+    TAG = "tag: "
+    tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)])
+    if not tags:
+        # Either we're using git < 1.8.3, or there really are no tags. We use
+        # a heuristic: assume all version tags have a digit. The old git %d
+        # expansion behaves like git log --decorate=short and strips out the
+        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
+        # between branches and tags. By ignoring refnames without digits, we
+        # filter out many common branch names like "release" and
+        # "stabilization", as well as "HEAD" and "master".
+        tags = set([r for r in refs if re.search(r"\d", r)])
+        if verbose:
+            print("discarding '%s', no digits" % ",".join(refs - tags))
+    if verbose:
+        print("likely tags: %s" % ",".join(sorted(tags)))
+    for ref in sorted(tags):
+        # sorting will prefer e.g. "2.0" over "2.0rc1"
+        if ref.startswith(tag_prefix):
+            r = ref[len(tag_prefix) :]
+            if verbose:
+                print("picking %s" % r)
+            return {
+                "version": r,
+                "full-revisionid": keywords["full"].strip(),
+                "dirty": False,
+                "error": None,
+                "date": date,
+            }
+    # no suitable tags, so version is "0+unknown", but full hex is still there
+    if verbose:
+        print("no suitable tags, using unknown + full revision id")
+    return {
+        "version": "0+unknown",
+        "full-revisionid": keywords["full"].strip(),
+        "dirty": False,
+        "error": "no suitable tags",
+        "date": None,
+    }
+
+
@register_vcs_handler("git", "pieces_from_vcs")
|
||||||
|
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
|
||||||
|
"""Get version from 'git describe' in the root of the source tree.
|
||||||
|
|
||||||
|
This only gets called if the git-archive 'subst' keywords were *not*
|
||||||
|
expanded, and _version.py hasn't already been rewritten with a short
|
||||||
|
version string, meaning we're inside a checked out source tree.
|
||||||
|
"""
|
||||||
|
GITS = ["git"]
|
||||||
|
if sys.platform == "win32":
|
||||||
|
GITS = ["git.cmd", "git.exe"]
|
||||||
|
|
||||||
|
out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True)
|
||||||
|
if rc != 0:
|
||||||
|
if verbose:
|
||||||
|
print("Directory %s not under git control" % root)
|
||||||
|
raise NotThisMethod("'git rev-parse --git-dir' returned error")
|
||||||
|
|
||||||
|
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
|
||||||
|
# if there isn't one, this yields HEX[-dirty] (no NUM)
|
||||||
|
describe_out, rc = run_command(
|
||||||
|
GITS,
|
||||||
|
[
|
||||||
|
"describe",
|
||||||
|
"--tags",
|
||||||
|
"--dirty",
|
||||||
|
"--always",
|
||||||
|
"--long",
|
||||||
|
"--match",
|
||||||
|
"%s*" % tag_prefix,
|
||||||
|
],
|
||||||
|
cwd=root,
|
||||||
|
)
|
||||||
|
# --long was added in git-1.5.5
|
||||||
|
if describe_out is None:
|
||||||
|
raise NotThisMethod("'git describe' failed")
|
||||||
|
describe_out = describe_out.strip()
|
||||||
|
full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
|
||||||
|
if full_out is None:
|
||||||
|
raise NotThisMethod("'git rev-parse' failed")
|
||||||
|
full_out = full_out.strip()
|
||||||
|
|
||||||
|
pieces = {}
|
||||||
|
pieces["long"] = full_out
|
||||||
|
pieces["short"] = full_out[:7] # maybe improved later
|
||||||
|
pieces["error"] = None
|
||||||
|
|
||||||
|
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
|
||||||
|
# TAG might have hyphens.
|
||||||
|
git_describe = describe_out
|
||||||
|
|
||||||
|
# look for -dirty suffix
|
||||||
|
dirty = git_describe.endswith("-dirty")
|
||||||
|
pieces["dirty"] = dirty
|
||||||
|
if dirty:
|
||||||
|
git_describe = git_describe[: git_describe.rindex("-dirty")]
|
||||||
|
|
||||||
|
# now we have TAG-NUM-gHEX or HEX
|
||||||
|
|
||||||
|
if "-" in git_describe:
|
||||||
|
# TAG-NUM-gHEX
|
||||||
|
mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe)
|
||||||
|
if not mo:
|
||||||
|
# unparseable. Maybe git-describe is misbehaving?
|
||||||
|
pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out
|
||||||
|
return pieces
|
||||||
|
|
||||||
|
# tag
|
||||||
|
full_tag = mo.group(1)
|
||||||
|
if not full_tag.startswith(tag_prefix):
|
||||||
|
if verbose:
|
||||||
|
fmt = "tag '%s' doesn't start with prefix '%s'"
|
||||||
|
print(fmt % (full_tag, tag_prefix))
|
||||||
|
pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (
|
||||||
|
full_tag,
|
||||||
|
tag_prefix,
|
||||||
|
)
|
||||||
|
return pieces
|
||||||
|
pieces["closest-tag"] = full_tag[len(tag_prefix) :]
|
||||||
|
|
||||||
|
# distance: number of commits since tag
|
||||||
|
pieces["distance"] = int(mo.group(2))
|
||||||
|
|
||||||
|
# commit: short hex revision ID
|
||||||
|
pieces["short"] = mo.group(3)
|
||||||
|
|
||||||
|
else:
|
||||||
|
# HEX: no tags
|
||||||
|
pieces["closest-tag"] = None
|
||||||
|
count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root)
|
||||||
|
pieces["distance"] = int(count_out) # total number of commits
|
||||||
|
|
||||||
|
# commit date: see ISO-8601 comment in git_versions_from_keywords()
|
||||||
|
date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[
|
||||||
|
0
|
||||||
|
].strip()
|
||||||
|
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
|
||||||
|
|
||||||
|
return pieces
|
||||||
|
|
||||||
|
|
||||||
|
+def plus_or_dot(pieces):
+    """Return a + if we don't already have one, else return a ."""
+    if "+" in pieces.get("closest-tag", ""):
+        return "."
+    return "+"
+
+
+def render_pep440(pieces):
+    """Build up version string, with post-release "local version identifier".
+
+    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
+    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
+
+    Exceptions:
+    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            rendered += plus_or_dot(pieces)
+            rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
+            if pieces["dirty"]:
+                rendered += ".dirty"
+    else:
+        # exception #1
+        rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
+        if pieces["dirty"]:
+            rendered += ".dirty"
+    return rendered
+
+
+def render_pep440_pre(pieces):
+    """TAG[.post.devDISTANCE] -- No -dirty.
+
+    Exceptions:
+    1: no tags. 0.post.devDISTANCE
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"]:
+            rendered += ".post.dev%d" % pieces["distance"]
+    else:
+        # exception #1
+        rendered = "0.post.dev%d" % pieces["distance"]
+    return rendered
+
+
+def render_pep440_post(pieces):
+    """TAG[.postDISTANCE[.dev0]+gHEX] .
+
+    The ".dev0" means dirty. Note that .dev0 sorts backwards
+    (a dirty tree will appear "older" than the corresponding clean one),
+    but you shouldn't be releasing software with -dirty anyways.
+
+    Exceptions:
+    1: no tags. 0.postDISTANCE[.dev0]
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            rendered += ".post%d" % pieces["distance"]
+            if pieces["dirty"]:
+                rendered += ".dev0"
+            rendered += plus_or_dot(pieces)
+            rendered += "g%s" % pieces["short"]
+    else:
+        # exception #1
+        rendered = "0.post%d" % pieces["distance"]
+        if pieces["dirty"]:
+            rendered += ".dev0"
+        rendered += "+g%s" % pieces["short"]
+    return rendered
+
+
+def render_pep440_old(pieces):
+    """TAG[.postDISTANCE[.dev0]] .
+
+    The ".dev0" means dirty.
+
+    Exceptions:
+    1: no tags. 0.postDISTANCE[.dev0]
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            rendered += ".post%d" % pieces["distance"]
+            if pieces["dirty"]:
+                rendered += ".dev0"
+    else:
+        # exception #1
+        rendered = "0.post%d" % pieces["distance"]
+        if pieces["dirty"]:
+            rendered += ".dev0"
+    return rendered
+
+
+def render_git_describe(pieces):
+    """TAG[-DISTANCE-gHEX][-dirty].
+
+    Like 'git describe --tags --dirty --always'.
+
+    Exceptions:
+    1: no tags. HEX[-dirty] (note: no 'g' prefix)
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"]:
+            rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
+    else:
+        # exception #1
+        rendered = pieces["short"]
+    if pieces["dirty"]:
+        rendered += "-dirty"
+    return rendered
+
+
+def render_git_describe_long(pieces):
+    """TAG-DISTANCE-gHEX[-dirty].
+
+    Like 'git describe --tags --dirty --always -long'.
+    The distance/hash is unconditional.
+
+    Exceptions:
+    1: no tags. HEX[-dirty] (note: no 'g' prefix)
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
+    else:
+        # exception #1
+        rendered = pieces["short"]
+    if pieces["dirty"]:
+        rendered += "-dirty"
+    return rendered
+
+
+def render(pieces, style):
+    """Render the given version pieces into the requested style."""
+    if pieces["error"]:
+        return {
+            "version": "unknown",
+            "full-revisionid": pieces.get("long"),
+            "dirty": None,
+            "error": pieces["error"],
+            "date": None,
+        }
+
+    if not style or style == "default":
+        style = "pep440"  # the default
+
+    if style == "pep440":
+        rendered = render_pep440(pieces)
+    elif style == "pep440-pre":
+        rendered = render_pep440_pre(pieces)
+    elif style == "pep440-post":
+        rendered = render_pep440_post(pieces)
+    elif style == "pep440-old":
+        rendered = render_pep440_old(pieces)
+    elif style == "git-describe":
+        rendered = render_git_describe(pieces)
+    elif style == "git-describe-long":
+        rendered = render_git_describe_long(pieces)
+    else:
+        raise ValueError("unknown style '%s'" % style)
+
+    return {
+        "version": rendered,
+        "full-revisionid": pieces["long"],
+        "dirty": pieces["dirty"],
+        "error": None,
+        "date": pieces.get("date"),
+    }
+
+
+def get_versions():
+    """Get version information or return default if unable to do so."""
+    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
+    # __file__, we can work backwards from there to the root. Some
+    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
+    # case we can only use expanded keywords.
+
+    cfg = get_config()
+    verbose = cfg.verbose
+
+    try:
+        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose)
+    except NotThisMethod:
+        pass
+
+    try:
+        root = os.path.realpath(__file__)
+        # versionfile_source is the relative path from the top of the source
+        # tree (where the .git directory might live) to this file. Invert
+        # this to find the root from __file__.
+        for i in cfg.versionfile_source.split("/"):
+            root = os.path.dirname(root)
+    except NameError:
+        return {
+            "version": "0+unknown",
+            "full-revisionid": None,
+            "dirty": None,
+            "error": "unable to find root of source tree",
+            "date": None,
+        }
+
+    try:
+        pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
+        return render(pieces, cfg.style)
+    except NotThisMethod:
+        pass
+
+    try:
+        if cfg.parentdir_prefix:
+            return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
+    except NotThisMethod:
+        pass
+
+    return {
+        "version": "0+unknown",
+        "full-revisionid": None,
+        "dirty": None,
+        "error": "unable to compute version",
+        "date": None,
+    }
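Whichever strategy succeeds, `get_versions()` always returns the same five-key dictionary; a quick sketch of using it (the version values in the comment are made up):

    from datasette._version import get_versions

    info = get_versions()
    # e.g. {"version": "0.39", "full-revisionid": "abc123...", "dirty": False,
    #       "error": None, "date": "2020-03-24T22:13:38+0000"}
    print(info["version"])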
@@ -1,23 +0,0 @@
-from datasette import hookimpl
-from itsdangerous import BadSignature
-from datasette.utils import baseconv
-import time
-
-
-@hookimpl
-def actor_from_request(datasette, request):
-    if "ds_actor" not in request.cookies:
-        return None
-    try:
-        decoded = datasette.unsign(request.cookies["ds_actor"], "actor")
-        # If it has "e" and "a" keys process the "e" expiry
-        if not isinstance(decoded, dict) or "a" not in decoded:
-            return None
-        expires_at = decoded.get("e")
-        if expires_at:
-            timestamp = int(baseconv.base62.decode(expires_at))
-            if time.time() > timestamp:
-                return None
-        return decoded["a"]
-    except BadSignature:
-        return None
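Reading the removed hook, a valid `ds_actor` cookie is a signed dictionary with an `"a"` (actor) key and an optional `"e"` key holding a base62-encoded expiry timestamp. A sketch of building a matching cookie with the same `sign`/`baseconv` utilities the hook relies on (the one-hour expiry and the helper name are illustrative):

    import time
    from datasette.utils import baseconv


    def make_actor_cookie(datasette, actor, expire_after=3600):
        # "a" holds the actor, "e" a base62-encoded Unix expiry timestamp
        expires_at = int(time.time()) + expire_after
        return datasette.sign(
            {"a": actor, "e": baseconv.base62.encode(expires_at)}, "actor"
        )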
2618 datasette/app.py

(File diff suppressed because it is too large)
@@ -1,61 +0,0 @@
-from datasette import hookimpl
-from datasette.utils.asgi import Response, BadRequest
-from datasette.utils import to_css_class
-import hashlib
-
-_BLOB_COLUMN = "_blob_column"
-_BLOB_HASH = "_blob_hash"
-
-
-async def render_blob(datasette, database, rows, columns, request, table, view_name):
-    if _BLOB_COLUMN not in request.args:
-        raise BadRequest(f"?{_BLOB_COLUMN}= is required")
-    blob_column = request.args[_BLOB_COLUMN]
-    if blob_column not in columns:
-        raise BadRequest(f"{blob_column} is not a valid column")
-
-    # If ?_blob_hash= provided, use that to select the row - otherwise use first row
-    blob_hash = None
-    if _BLOB_HASH in request.args:
-        blob_hash = request.args[_BLOB_HASH]
-        for row in rows:
-            value = row[blob_column]
-            if hashlib.sha256(value).hexdigest() == blob_hash:
-                break
-        else:
-            # Loop did not break
-            raise BadRequest(
-                "Link has expired - the requested binary content has changed or could not be found."
-            )
-    else:
-        row = rows[0]
-
-    value = row[blob_column]
-    filename_bits = []
-    if table:
-        filename_bits.append(to_css_class(table))
-    if "pks" in request.url_vars:
-        filename_bits.append(request.url_vars["pks"])
-    filename_bits.append(to_css_class(blob_column))
-    if blob_hash:
-        filename_bits.append(blob_hash[:6])
-    filename = "-".join(filename_bits) + ".blob"
-    headers = {
-        "X-Content-Type-Options": "nosniff",
-        "Content-Disposition": f'attachment; filename="{filename}"',
-    }
-    return Response(
-        body=value or b"",
-        status=200,
-        headers=headers,
-        content_type="application/binary",
-    )
-
-
-@hookimpl
-def register_output_renderer():
-    return {
-        "extension": "blob",
-        "render": render_blob,
-        "can_render": lambda: False,
-    }
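Given the renderer registered above, a BLOB column is fetched with a URL of this shape - the database, table, row and column names are placeholders:

    /mydatabase/mytable/1.blob?_blob_column=data&_blob_hash=<sha256-of-the-value>

The `_blob_hash` guard means a stale link returns the "Link has expired" error rather than different binary content.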
763 datasette/cli.py
@@ -2,156 +2,84 @@ import asyncio
 import uvicorn
 import click
 from click import formatting
-from click.types import CompositeParamType
 from click_default_group import DefaultGroup
-import functools
 import json
 import os
-import pathlib
-from runpy import run_module
 import shutil
 from subprocess import call
 import sys
-import textwrap
-import webbrowser
-from .app import (
-    Datasette,
-    DEFAULT_SETTINGS,
-    SETTINGS,
-    SQLITE_LIMIT_ATTACHED,
-    pm,
-)
+from .app import Datasette, DEFAULT_CONFIG, CONFIG_OPTIONS, pm
 from .utils import (
-    LoadExtension,
-    StartupError,
-    check_connection,
-    deep_dict_update,
-    find_spatialite,
-    parse_metadata,
-    ConnectionProblem,
-    SpatialiteConnectionProblem,
-    initial_path_for_datasette,
-    pairs_to_nested_config,
     temporary_docker_directory,
     value_as_boolean,
-    SpatialiteNotFound,
    StaticMount,
     ValueAsBooleanError,
 )
-from .utils.sqlite import sqlite3
-from .utils.testing import TestClient
-from .version import __version__
 
 
-def run_sync(coro_func):
-    """Run an async callable to completion on a fresh event loop."""
-    loop = asyncio.new_event_loop()
-    try:
-        asyncio.set_event_loop(loop)
-        return loop.run_until_complete(coro_func())
-    finally:
-        asyncio.set_event_loop(None)
-        loop.close()
-
-
-# Use Rich for tracebacks if it is installed
-try:
-    from rich.traceback import install
-
-    install(show_locals=True)
-except ImportError:
-    pass
-
-
-class Setting(CompositeParamType):
-    name = "setting"
-    arity = 2
+class Config(click.ParamType):
+    name = "config"
 
     def convert(self, config, param, ctx):
-        name, value = config
-        if name in DEFAULT_SETTINGS:
-            # For backwards compatibility with how this worked prior to
-            # Datasette 1.0, we turn bare setting names into setting.name
-            # Type checking for those older settings
-            default = DEFAULT_SETTINGS[name]
-            name = "settings.{}".format(name)
-            if isinstance(default, bool):
-                try:
-                    return name, "true" if value_as_boolean(value) else "false"
-                except ValueAsBooleanError:
-                    self.fail(f'"{name}" should be on/off/true/false/1/0', param, ctx)
-            elif isinstance(default, int):
-                if not value.isdigit():
-                    self.fail(f'"{name}" should be an integer', param, ctx)
-                return name, value
-            elif isinstance(default, str):
-                return name, value
-            else:
-                # Should never happen:
-                self.fail("Invalid option")
-        return name, value
-
-
-def sqlite_extensions(fn):
-    fn = click.option(
-        "sqlite_extensions",
-        "--load-extension",
-        type=LoadExtension(),
-        envvar="DATASETTE_LOAD_EXTENSION",
-        multiple=True,
-        help="Path to a SQLite extension to load, and optional entrypoint",
-    )(fn)
-
-    # Wrap it in a custom error handler
-    @functools.wraps(fn)
-    def wrapped(*args, **kwargs):
-        try:
-            return fn(*args, **kwargs)
-        except AttributeError as e:
-            if "enable_load_extension" in str(e):
-                raise click.ClickException(
-                    textwrap.dedent(
-                        """
-                        Your Python installation does not have the ability to load SQLite extensions.
-
-                        More information: https://datasette.io/help/extensions
-                        """
-                    ).strip()
-                )
-            raise
-
-    return wrapped
+        if ":" not in config:
+            self.fail('"{}" should be name:value'.format(config), param, ctx)
+            return
+        name, value = config.split(":")
+        if name not in DEFAULT_CONFIG:
+            self.fail(
+                "{} is not a valid option (--help-config to see all)".format(name),
+                param,
+                ctx,
+            )
+            return
+        # Type checking
+        default = DEFAULT_CONFIG[name]
+        if isinstance(default, bool):
+            try:
+                return name, value_as_boolean(value)
+            except ValueAsBooleanError:
+                self.fail(
+                    '"{}" should be on/off/true/false/1/0'.format(name), param, ctx
+                )
+                return
+        elif isinstance(default, int):
+            if not value.isdigit():
+                self.fail('"{}" should be an integer'.format(name), param, ctx)
+                return
+            return name, int(value)
+        else:
+            # Should never happen:
+            self.fail("Invalid option")
 
 
 @click.group(cls=DefaultGroup, default="serve", default_if_no_args=True)
-@click.version_option(version=__version__)
+@click.version_option()
 def cli():
     """
-    Datasette is an open source multi-tool for exploring and publishing data
-
-    \b
-    About Datasette: https://datasette.io/
-    Full documentation: https://docs.datasette.io/
+    Datasette!
     """
 
 
 @cli.command()
 @click.argument("files", type=click.Path(exists=True), nargs=-1)
 @click.option("--inspect-file", default="-")
-@sqlite_extensions
+@click.option(
+    "sqlite_extensions",
+    "--load-extension",
+    envvar="SQLITE_EXTENSIONS",
+    multiple=True,
+    type=click.Path(exists=True, resolve_path=True),
+    help="Path to a SQLite extension to load",
+)
 def inspect(files, inspect_file, sqlite_extensions):
-    """
-    Generate JSON summary of provided database files
-
-    This can then be passed to "datasette --inspect-file" to speed up count
-    operations against immutable database files.
-    """
-    inspect_data = run_sync(lambda: inspect_(files, sqlite_extensions))
+    app = Datasette([], immutables=files, sqlite_extensions=sqlite_extensions)
     if inspect_file == "-":
-        sys.stdout.write(json.dumps(inspect_data, indent=2))
+        out = sys.stdout
     else:
-        with open(inspect_file, "w") as fp:
-            fp.write(json.dumps(inspect_data, indent=2))
+        out = open(inspect_file, "w")
+    loop = asyncio.get_event_loop()
+    inspect_data = loop.run_until_complete(inspect_(files, sqlite_extensions))
+    out.write(json.dumps(inspect_data, indent=2))
 
 
 async def inspect_(files, sqlite_extensions):
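On the branch side, the `Config` param type parses colon-separated pairs, so options are set like this (`sql_time_limit_ms` is one of the options validated against `DEFAULT_CONFIG`; the value is illustrative):

    datasette mydatabase.db --config sql_time_limit_ms:3500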
@@ -171,9 +99,18 @@ async def inspect_(files, sqlite_extensions):
     return data
 
 
-@cli.group()
+class PublishAliases(click.Group):
+    aliases = {"now": "nowv1"}
+
+    def get_command(self, ctx, cmd_name):
+        if cmd_name in self.aliases:
+            return click.Group.get_command(self, ctx, self.aliases[cmd_name])
+        return click.Group.get_command(self, ctx, cmd_name)
+
+
+@cli.group(cls=PublishAliases)
 def publish():
-    """Publish specified SQLite database files to the internet along with a Datasette-powered interface and API"""
+    "Publish specified SQLite database files to the internet along with a Datasette-powered interface and API"
     pass
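The practical effect of `PublishAliases` is backwards compatibility for the renamed subcommand: `datasette publish now database.db` is silently resolved to the `nowv1` implementation.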
@@ -183,23 +120,15 @@ pm.hook.publish_subcommand(publish=publish)
 
 @cli.command()
 @click.option("--all", help="Include built-in default plugins", is_flag=True)
-@click.option(
-    "--requirements", help="Output requirements.txt of installed plugins", is_flag=True
-)
 @click.option(
     "--plugins-dir",
     type=click.Path(exists=True, file_okay=False, dir_okay=True),
     help="Path to directory containing custom plugins",
 )
-def plugins(all, requirements, plugins_dir):
-    """List currently installed plugins"""
+def plugins(all, plugins_dir):
+    "List currently available plugins"
     app = Datasette([], plugins_dir=plugins_dir)
-    if requirements:
-        for plugin in app._plugins():
-            if plugin["version"]:
-                click.echo("{}=={}".format(plugin["name"], plugin["version"]))
-    else:
-        click.echo(json.dumps(app._plugins(all=all), indent=4))
+    click.echo(json.dumps(app.plugins(all), indent=4))
 
 
 @cli.command()
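With `--requirements` on the main side, the command emits one `name==version` line per installed plugin that reports a version, suitable for a requirements.txt; hypothetical output:

    datasette-vega==0.6.2
    datasette-cluster-map==0.17.1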
@@ -213,10 +142,10 @@ def plugins(all, requirements, plugins_dir):
     "-m",
     "--metadata",
     type=click.File(mode="r"),
-    help="Path to JSON/YAML file containing metadata to publish",
+    help="Path to JSON file containing metadata to publish",
 )
 @click.option("--extra-options", help="Extra options to pass to datasette serve")
-@click.option("--branch", help="Install datasette from a GitHub branch e.g. main")
+@click.option("--branch", help="Install datasette from a GitHub branch e.g. master")
 @click.option(
     "--template-dir",
     type=click.Path(exists=True, file_okay=False, dir_okay=True),
@@ -230,7 +159,7 @@ def plugins(all, requirements, plugins_dir):
 @click.option(
     "--static",
     type=StaticMount(),
-    help="Serve static files from this directory at /MOUNT/...",
+    help="mountpoint:path-to-directory for serving static files",
     multiple=True,
 )
 @click.option(
@@ -238,19 +167,6 @@ def plugins(all, requirements, plugins_dir):
 )
 @click.option("--spatialite", is_flag=True, help="Enable SpatialLite extension")
 @click.option("--version-note", help="Additional note to show on /-/versions")
-@click.option(
-    "--secret",
-    help="Secret used for signing secure values, such as signed cookies",
-    envvar="DATASETTE_PUBLISH_SECRET",
-    default=lambda: os.urandom(32).hex(),
-)
-@click.option(
-    "-p",
-    "--port",
-    default=8001,
-    type=click.IntRange(1, 65535),
-    help="Port to run the server on, defaults to 8001",
-)
 @click.option("--title", help="Title for metadata")
 @click.option("--license", help="License label for metadata")
 @click.option("--license_url", help="License URL for metadata")
@@ -270,11 +186,9 @@ def package(
     install,
     spatialite,
     version_note,
-    secret,
-    port,
-    **extra_metadata,
+    **extra_metadata
 ):
-    """Package SQLite files into a Datasette Docker container"""
+    "Package specified SQLite files into a new datasette Docker container"
     if not shutil.which("docker"):
         click.secho(
             ' The package command requires "docker" to be installed and configured ',
@@ -287,18 +201,16 @@ def package(
     with temporary_docker_directory(
         files,
         "datasette",
-        metadata=metadata,
-        extra_options=extra_options,
-        branch=branch,
-        template_dir=template_dir,
-        plugins_dir=plugins_dir,
-        static=static,
-        install=install,
-        spatialite=spatialite,
-        version_note=version_note,
-        secret=secret,
-        extra_metadata=extra_metadata,
-        port=port,
+        metadata,
+        extra_options,
+        branch,
+        template_dir,
+        plugins_dir,
+        static,
+        install,
+        spatialite,
+        version_note,
+        extra_metadata,
     ):
         args = ["docker", "build"]
         if tag:
@@ -309,48 +221,7 @@ def package(
 
 
 @cli.command()
-@click.argument("packages", nargs=-1)
-@click.option(
-    "-U", "--upgrade", is_flag=True, help="Upgrade packages to latest version"
-)
-@click.option(
-    "-r",
-    "--requirement",
-    type=click.Path(exists=True),
-    help="Install from requirements file",
-)
-@click.option(
-    "-e",
-    "--editable",
-    help="Install a project in editable mode from this path",
-)
-def install(packages, upgrade, requirement, editable):
-    """Install plugins and packages from PyPI into the same environment as Datasette"""
-    if not packages and not requirement and not editable:
-        raise click.UsageError("Please specify at least one package to install")
-    args = ["pip", "install"]
-    if upgrade:
-        args += ["--upgrade"]
-    if editable:
-        args += ["--editable", editable]
-    if requirement:
-        args += ["-r", requirement]
-    args += list(packages)
-    sys.argv = args
-    run_module("pip", run_name="__main__")
-
-
-@cli.command()
-@click.argument("packages", nargs=-1, required=True)
-@click.option("-y", "--yes", is_flag=True, help="Don't ask for confirmation")
-def uninstall(packages, yes):
-    """Uninstall plugins and Python packages from the Datasette environment"""
-    sys.argv = ["pip", "uninstall"] + list(packages) + (["-y"] if yes else [])
-    run_module("pip", run_name="__main__")
-
-
-@cli.command()
-@click.argument("files", type=click.Path(), nargs=-1)
+@click.argument("files", type=click.Path(exists=True), nargs=-1)
 @click.option(
     "-i",
     "--immutable",
@@ -359,35 +230,28 @@ def uninstall(packages, yes):
     multiple=True,
 )
 @click.option(
-    "-h",
-    "--host",
-    default="127.0.0.1",
-    help=(
-        "Host for server. Defaults to 127.0.0.1 which means only connections "
-        "from the local machine will be allowed. Use 0.0.0.0 to listen to "
-        "all IPs and allow access from other machines."
-    ),
+    "-h", "--host", default="127.0.0.1", help="host for server, defaults to 127.0.0.1"
 )
+@click.option("-p", "--port", default=8001, help="port for server, defaults to 8001")
 @click.option(
-    "-p",
-    "--port",
-    default=8001,
-    type=click.IntRange(0, 65535),
-    help="Port for server, defaults to 8001. Use -p 0 to automatically assign an available port.",
-)
-@click.option(
-    "--uds",
-    help="Bind to a Unix domain socket",
+    "--debug", is_flag=True, help="Enable debug mode - useful for development"
 )
 @click.option(
     "--reload",
     is_flag=True,
-    help="Automatically reload if code or metadata change detected - useful for development",
+    help="Automatically reload if database or code change detected - useful for development",
 )
 @click.option(
     "--cors", is_flag=True, help="Enable CORS by serving Access-Control-Allow-Origin: *"
 )
-@sqlite_extensions
+@click.option(
+    "sqlite_extensions",
+    "--load-extension",
+    envvar="SQLITE_EXTENSIONS",
+    multiple=True,
+    type=click.Path(exists=True, resolve_path=True),
+    help="Path to a SQLite extension to load",
+)
 @click.option(
     "--inspect-file", help='Path to JSON file created using "datasette inspect"'
 )
@@ -395,7 +259,7 @@ def uninstall(packages, yes):
     "-m",
     "--metadata",
     type=click.File(mode="r"),
-    help="Path to JSON/YAML file containing license/source metadata",
+    help="Path to JSON file containing license/source metadata",
 )
 @click.option(
     "--template-dir",
@@ -410,102 +274,24 @@ def uninstall(packages, yes):
 @click.option(
     "--static",
     type=StaticMount(),
-    help="Serve static files from this directory at /MOUNT/...",
+    help="mountpoint:path-to-directory for serving static files",
     multiple=True,
 )
-@click.option("--memory", is_flag=True, help="Make /_memory database available")
+@click.option("--memory", is_flag=True, help="Make :memory: database available")
 @click.option(
-    "-c",
     "--config",
-    type=click.File(mode="r"),
-    help="Path to JSON/YAML Datasette configuration file",
-)
-@click.option(
-    "-s",
-    "--setting",
-    "settings",
-    type=Setting(),
-    help="nested.key, value setting to use in Datasette configuration",
+    type=Config(),
+    help="Set config option using configname:value datasette.readthedocs.io/en/latest/config.html",
     multiple=True,
 )
-@click.option(
-    "--secret",
-    help="Secret used for signing secure values, such as signed cookies",
-    envvar="DATASETTE_SECRET",
-)
-@click.option(
-    "--root",
-    help="Output URL that sets a cookie authenticating the root user",
-    is_flag=True,
-)
-@click.option(
-    "--default-deny",
-    help="Deny all permissions by default",
-    is_flag=True,
-)
-@click.option(
-    "--get",
-    help="Run an HTTP GET request against this path, print results and exit",
-)
-@click.option(
-    "--headers",
-    is_flag=True,
-    help="Include HTTP headers in --get output",
-)
-@click.option(
-    "--token",
-    help="API token to send with --get requests",
-)
-@click.option(
-    "--actor",
-    help="Actor to use for --get requests (JSON string)",
-)
 @click.option("--version-note", help="Additional note to show on /-/versions")
-@click.option("--help-settings", is_flag=True, help="Show available settings")
-@click.option("--pdb", is_flag=True, help="Launch debugger on any errors")
-@click.option(
-    "-o",
-    "--open",
-    "open_browser",
-    is_flag=True,
-    help="Open Datasette in your web browser",
-)
-@click.option(
-    "--create",
-    is_flag=True,
-    help="Create database files if they do not exist",
-)
-@click.option(
-    "--crossdb",
-    is_flag=True,
-    help="Enable cross-database joins using the /_memory database",
-)
-@click.option(
-    "--nolock",
-    is_flag=True,
-    help="Ignore locking, open locked files in read-only mode",
-)
-@click.option(
-    "--ssl-keyfile",
-    help="SSL key file",
-    envvar="DATASETTE_SSL_KEYFILE",
-)
-@click.option(
-    "--ssl-certfile",
-    help="SSL certificate file",
-    envvar="DATASETTE_SSL_CERTFILE",
-)
-@click.option(
-    "--internal",
-    type=click.Path(),
-    help="Path to a persistent Datasette internal SQLite database",
-)
+@click.option("--help-config", is_flag=True, help="Show available config options")
 def serve(
     files,
     immutable,
     host,
     port,
-    uds,
+    debug,
     reload,
     cors,
     sqlite_extensions,
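For comparison, the main side's replacement for `--config` is the two-value `-s/--setting` option declared above; a usage sketch (setting name used illustratively):

    datasette mydatabase.db --setting sql_time_limit_ms 3500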
@@ -516,34 +302,17 @@ def serve(
     static,
     memory,
     config,
-    settings,
-    secret,
-    root,
-    default_deny,
-    get,
-    headers,
-    token,
-    actor,
     version_note,
-    help_settings,
-    pdb,
-    open_browser,
-    create,
-    crossdb,
-    nolock,
-    ssl_keyfile,
-    ssl_certfile,
-    internal,
-    return_instance=False,
+    help_config,
 ):
     """Serve up specified SQLite database files with a web UI"""
-    if help_settings:
+    if help_config:
         formatter = formatting.HelpFormatter()
-        with formatter.section("Settings"):
+        with formatter.section("Config options"):
             formatter.write_dl(
                 [
-                    (option.name, f"{option.help} (default={option.default})")
-                    for option in SETTINGS
+                    (option.name, "{} (default={})".format(option.help, option.default))
+                    for option in CONFIG_OPTIONS
                 ]
             )
         click.echo(formatter.getvalue())
@ -552,342 +321,38 @@ def serve(
|
||||||
import hupper
|
import hupper
|
||||||
|
|
||||||
reloader = hupper.start_reloader("datasette.cli.serve")
|
reloader = hupper.start_reloader("datasette.cli.serve")
|
||||||
if immutable:
|
reloader.watch_files(files)
|
||||||
reloader.watch_files(immutable)
|
|
||||||
if config:
|
|
||||||
reloader.watch_files([config.name])
|
|
||||||
if metadata:
|
if metadata:
|
||||||
reloader.watch_files([metadata.name])
|
reloader.watch_files([metadata.name])
|
||||||
|
|
||||||
inspect_data = None
|
inspect_data = None
|
||||||
if inspect_file:
|
if inspect_file:
|
||||||
with open(inspect_file) as fp:
|
inspect_data = json.load(open(inspect_file))
|
||||||
inspect_data = json.load(fp)
|
|
||||||
|
|
||||||
metadata_data = None
|
metadata_data = None
|
||||||
if metadata:
|
if metadata:
|
||||||
metadata_data = parse_metadata(metadata.read())
|
metadata_data = json.loads(metadata.read())
|
||||||
|
|
||||||
config_data = None
|
click.echo(
|
||||||
if config:
|
"Serve! files={} (immutables={}) on port {}".format(files, immutable, port)
|
||||||
config_data = parse_metadata(config.read())
|
)
|
||||||
|
ds = Datasette(
|
||||||
config_data = config_data or {}
|
files,
|
||||||
|
|
||||||
# Merge in settings from -s/--setting
|
|
||||||
if settings:
|
|
||||||
settings_updates = pairs_to_nested_config(settings)
|
|
||||||
# Merge recursively, to avoid over-writing nested values
|
|
||||||
# https://github.com/simonw/datasette/issues/2389
|
|
||||||
deep_dict_update(config_data, settings_updates)
|
|
||||||
|
|
||||||
kwargs = dict(
|
|
||||||
immutables=immutable,
|
immutables=immutable,
|
||||||
cache_headers=not reload,
|
cache_headers=not debug and not reload,
|
||||||
cors=cors,
|
cors=cors,
|
||||||
inspect_data=inspect_data,
|
inspect_data=inspect_data,
|
||||||
config=config_data,
|
|
||||||
metadata=metadata_data,
|
metadata=metadata_data,
|
||||||
sqlite_extensions=sqlite_extensions,
|
sqlite_extensions=sqlite_extensions,
|
||||||
template_dir=template_dir,
|
template_dir=template_dir,
|
||||||
plugins_dir=plugins_dir,
|
plugins_dir=plugins_dir,
|
||||||
static_mounts=static,
|
static_mounts=static,
|
||||||
settings=None, # These are passed in config= now
|
config=dict(config),
|
||||||
memory=memory,
|
memory=memory,
|
||||||
secret=secret,
|
|
||||||
version_note=version_note,
|
version_note=version_note,
|
||||||
pdb=pdb,
|
|
||||||
crossdb=crossdb,
|
|
||||||
nolock=nolock,
|
|
||||||
internal=internal,
|
|
||||||
default_deny=default_deny,
|
|
||||||
)
|
)
|
||||||
|
# Run async sanity checks - but only if we're not under pytest
|
||||||
# Separate directories from files
|
asyncio.get_event_loop().run_until_complete(ds.run_sanity_checks())
|
||||||
directories = [f for f in files if os.path.isdir(f)]
|
|
||||||
file_paths = [f for f in files if not os.path.isdir(f)]
|
|
||||||
|
|
||||||
# Handle config_dir - only one directory allowed
|
|
||||||
if len(directories) > 1:
|
|
||||||
raise click.ClickException(
|
|
||||||
"Cannot pass multiple directories. Pass a single directory as config_dir."
|
|
||||||
)
|
|
||||||
elif len(directories) == 1:
|
|
||||||
kwargs["config_dir"] = pathlib.Path(directories[0])
|
|
||||||
|
|
||||||
# Verify list of files, create if needed (and --create)
|
|
||||||
for file in file_paths:
|
|
||||||
if not pathlib.Path(file).exists():
|
|
||||||
if create:
|
|
||||||
sqlite3.connect(file).execute("vacuum")
|
|
||||||
else:
|
|
||||||
raise click.ClickException(
|
|
||||||
"Invalid value for '[FILES]...': Path '{}' does not exist.".format(
|
|
||||||
file
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
# Check for duplicate files by resolving all paths to their absolute forms
|
|
||||||
# Collect all database files that will be loaded (explicit files + config_dir files)
|
|
||||||
all_db_files = []
|
|
||||||
|
|
||||||
# Add explicit files
|
|
||||||
for file in file_paths:
|
|
||||||
all_db_files.append((file, pathlib.Path(file).resolve()))
|
|
||||||
|
|
||||||
# Add config_dir databases if config_dir is set
|
|
||||||
if "config_dir" in kwargs:
|
|
||||||
config_dir = kwargs["config_dir"]
|
|
||||||
for ext in ("db", "sqlite", "sqlite3"):
|
|
||||||
for db_file in config_dir.glob(f"*.{ext}"):
|
|
||||||
all_db_files.append((str(db_file), db_file.resolve()))
|
|
||||||
|
|
||||||
# Check for duplicates
|
|
||||||
seen = {}
|
|
||||||
for original_path, resolved_path in all_db_files:
|
|
||||||
if resolved_path in seen:
|
|
||||||
raise click.ClickException(
|
|
||||||
f"Duplicate database file: '{original_path}' and '{seen[resolved_path]}' "
|
|
||||||
f"both refer to {resolved_path}"
|
|
||||||
)
|
|
||||||
seen[resolved_path] = original_path
|
|
||||||
|
|
||||||
files = file_paths
|
|
||||||
|
|
||||||
try:
|
|
||||||
ds = Datasette(files, **kwargs)
|
|
||||||
except SpatialiteNotFound:
|
|
||||||
raise click.ClickException("Could not find SpatiaLite extension")
|
|
||||||
except StartupError as e:
|
|
||||||
raise click.ClickException(e.args[0])
|
|
||||||
|
|
||||||
if return_instance:
|
|
||||||
# Private utility mechanism for writing unit tests
|
|
||||||
return ds
|
|
||||||
|
|
||||||
# Run the "startup" plugin hooks
|
|
||||||
run_sync(ds.invoke_startup)
|
|
||||||
|
|
||||||
# Run async soundness checks - but only if we're not under pytest
|
|
||||||
run_sync(lambda: check_databases(ds))
|
|
||||||
|
|
||||||
if headers and not get:
|
|
||||||
raise click.ClickException("--headers can only be used with --get")
|
|
||||||
|
|
||||||
if token and not get:
|
|
||||||
raise click.ClickException("--token can only be used with --get")
|
|
||||||
|
|
||||||
if get:
|
|
||||||
client = TestClient(ds)
|
|
||||||
request_headers = {}
|
|
||||||
if token:
|
|
||||||
request_headers["Authorization"] = "Bearer {}".format(token)
|
|
||||||
cookies = {}
|
|
||||||
if actor:
|
|
||||||
cookies["ds_actor"] = client.actor_cookie(json.loads(actor))
|
|
||||||
response = client.get(get, headers=request_headers, cookies=cookies)
|
|
||||||
|
|
||||||
if headers:
|
|
||||||
# Output HTTP status code, headers, two newlines, then the response body
|
|
||||||
click.echo(f"HTTP/1.1 {response.status}")
|
|
||||||
for key, value in response.headers.items():
|
|
||||||
click.echo(f"{key}: {value}")
|
|
||||||
if response.text:
|
|
||||||
click.echo()
|
|
||||||
click.echo(response.text)
|
|
||||||
else:
|
|
||||||
click.echo(response.text)
|
|
||||||
|
|
||||||
exit_code = 0 if response.status == 200 else 1
|
|
||||||
sys.exit(exit_code)
|
|
||||||
return
|
|
||||||
     # Start the server
-    url = None
-    if root:
-        ds.root_enabled = True
-        url = "http://{}:{}{}?token={}".format(
-            host, port, ds.urls.path("-/auth-token"), ds._root_token
-        )
-        click.echo(url)
-    if open_browser:
-        if url is None:
-            # Figure out most convenient URL - to table, database or homepage
-            path = run_sync(lambda: initial_path_for_datasette(ds))
-            url = f"http://{host}:{port}{path}"
-        webbrowser.open(url)
-    uvicorn_kwargs = dict(
-        host=host, port=port, log_level="info", lifespan="on", workers=1
-    )
-    if uds:
-        uvicorn_kwargs["uds"] = uds
-    if ssl_keyfile:
-        uvicorn_kwargs["ssl_keyfile"] = ssl_keyfile
-    if ssl_certfile:
-        uvicorn_kwargs["ssl_certfile"] = ssl_certfile
-    uvicorn.run(ds.app(), **uvicorn_kwargs)
+    uvicorn.run(ds.app(), host=host, port=port, log_level="info")
-
-
-@cli.command()
-@click.argument("id")
-@click.option(
-    "--secret",
-    help="Secret used for signing the API tokens",
-    envvar="DATASETTE_SECRET",
-    required=True,
-)
-@click.option(
-    "-e",
-    "--expires-after",
-    help="Token should expire after this many seconds",
-    type=int,
-)
-@click.option(
-    "alls",
-    "-a",
-    "--all",
-    type=str,
-    metavar="ACTION",
-    multiple=True,
-    help="Restrict token to this action",
-)
-@click.option(
-    "databases",
-    "-d",
-    "--database",
-    type=(str, str),
-    metavar="DB ACTION",
-    multiple=True,
-    help="Restrict token to this action on this database",
-)
-@click.option(
-    "resources",
-    "-r",
-    "--resource",
-    type=(str, str, str),
-    metavar="DB RESOURCE ACTION",
-    multiple=True,
-    help="Restrict token to this action on this database resource (a table, SQL view or named query)",
-)
-@click.option(
-    "--debug",
-    help="Show decoded token",
-    is_flag=True,
-)
-@click.option(
-    "--plugins-dir",
-    type=click.Path(exists=True, file_okay=False, dir_okay=True),
-    help="Path to directory containing custom plugins",
-)
-def create_token(
-    id, secret, expires_after, alls, databases, resources, debug, plugins_dir
-):
"""
|
|
||||||
Create a signed API token for the specified actor ID
|
|
||||||
|
|
||||||
Example:
|
|
||||||
|
|
||||||
datasette create-token root --secret mysecret
|
|
||||||
|
|
||||||
To allow only "view-database-download" for all databases:
|
|
||||||
|
|
||||||
\b
|
|
||||||
datasette create-token root --secret mysecret \\
|
|
||||||
--all view-database-download
|
|
||||||
|
|
||||||
To allow "create-table" against a specific database:
|
|
||||||
|
|
||||||
\b
|
|
||||||
datasette create-token root --secret mysecret \\
|
|
||||||
--database mydb create-table
|
|
||||||
|
|
||||||
To allow "insert-row" against a specific table:
|
|
||||||
|
|
||||||
\b
|
|
||||||
datasette create-token root --secret myscret \\
|
|
||||||
--resource mydb mytable insert-row
|
|
||||||
|
|
||||||
Restricted actions can be specified multiple times using
|
|
||||||
multiple --all, --database, and --resource options.
|
|
||||||
|
|
||||||
Add --debug to see a decoded version of the token.
|
|
||||||
"""
|
|
||||||
-    ds = Datasette(secret=secret, plugins_dir=plugins_dir)
-
-    # Run ds.invoke_startup() in an event loop
-    run_sync(ds.invoke_startup)
-
-    # Warn about any unknown actions
-    actions = []
-    actions.extend(alls)
-    actions.extend([p[1] for p in databases])
-    actions.extend([p[2] for p in resources])
-    for action in actions:
-        if not ds.actions.get(action):
-            click.secho(
-                f" Unknown permission: {action} ",
-                fg="red",
-                err=True,
-            )
-
-    restrict_database = {}
-    for database, action in databases:
-        restrict_database.setdefault(database, []).append(action)
-    restrict_resource = {}
-    for database, resource, action in resources:
-        restrict_resource.setdefault(database, {}).setdefault(resource, []).append(
-            action
-        )
-
-    token = ds.create_token(
-        id,
-        expires_after=expires_after,
-        restrict_all=alls,
-        restrict_database=restrict_database,
-        restrict_resource=restrict_resource,
-    )
-    click.echo(token)
-    if debug:
-        encoded = token[len("dstok_") :]
-        click.echo("\nDecoded:\n")
-        click.echo(json.dumps(ds.unsign(encoded, namespace="token"), indent=2))
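The --database and --resource options accumulate into nested dictionaries before being handed to ds.create_token(). A small sketch of the shapes involved, with hypothetical database and table names:

databases = [("mydb", "view-database"), ("mydb", "execute-sql")]
resources = [("mydb", "mytable", "insert-row")]

restrict_database = {}
for database, action in databases:
    restrict_database.setdefault(database, []).append(action)
# {'mydb': ['view-database', 'execute-sql']}

restrict_resource = {}
for database, resource, action in resources:
    restrict_resource.setdefault(database, {}).setdefault(resource, []).append(action)
# {'mydb': {'mytable': ['insert-row']}}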
-
-
-pm.hook.register_commands(cli=cli)
-
-
-async def check_databases(ds):
-    # Run check_connection against every connected database
-    # to confirm they are all usable
-    for database in list(ds.databases.values()):
-        try:
-            await database.execute_fn(check_connection)
-        except SpatialiteConnectionProblem:
-            suggestion = ""
-            try:
-                find_spatialite()
-                suggestion = "\n\nTry adding the --load-extension=spatialite option."
-            except SpatialiteNotFound:
-                pass
-            raise click.UsageError(
-                "It looks like you're trying to load a SpatiaLite"
-                + " database without first loading the SpatiaLite module."
-                + suggestion
-                + "\n\nRead more: https://docs.datasette.io/en/stable/spatialite.html"
-            )
-        except ConnectionProblem as e:
-            raise click.UsageError(
-                f"Connection to {database.path} failed check: {str(e.args[0])}"
-            )
-    # If --crossdb and more than SQLITE_LIMIT_ATTACHED show warning
-    if (
-        ds.crossdb
-        and len([db for db in ds.databases.values() if not db.is_memory])
-        > SQLITE_LIMIT_ATTACHED
-    ):
-        msg = (
-            "Warning: --crossdb only works with the first {} attached databases".format(
-                SQLITE_LIMIT_ATTACHED
-            )
-        )
-        click.echo(click.style(msg, bold=True, fg="yellow"), err=True)
@@ -1,380 +1,46 @@
-import asyncio
-from collections import namedtuple
 from pathlib import Path
-import janus
-import queue
-import sqlite_utils
-import sys
-import threading
-import uuid
-
-from .tracer import trace
 from .utils import (
+    QueryInterrupted,
     detect_fts,
     detect_primary_keys,
     detect_spatialite,
     get_all_foreign_keys,
     get_outbound_foreign_keys,
-    md5_not_usedforsecurity,
-    sqlite_timelimit,
     sqlite3,
     table_columns,
-    table_column_details,
 )
-from .utils.sqlite import sqlite_version
 from .inspect import inspect_hash
 
-connections = threading.local()
-
-AttachedDatabase = namedtuple("AttachedDatabase", ("seq", "name", "file"))
-
 
 class Database:
-    # For table counts stop at this many rows:
-    count_limit = 10000
-    _thread_local_id_counter = 1
-
-    def __init__(
-        self,
-        ds,
-        path=None,
-        is_mutable=True,
-        is_memory=False,
-        memory_name=None,
-        mode=None,
-    ):
-        self.name = None
-        self._thread_local_id = f"x{self._thread_local_id_counter}"
-        Database._thread_local_id_counter += 1
-        self.route = None
+    def __init__(self, ds, path=None, is_mutable=False, is_memory=False):
         self.ds = ds
         self.path = path
         self.is_mutable = is_mutable
         self.is_memory = is_memory
-        self.memory_name = memory_name
-        if memory_name is not None:
-            self.is_memory = True
-        self.cached_hash = None
+        self.hash = None
         self.cached_size = None
-        self._cached_table_counts = None
-        self._write_thread = None
-        self._write_queue = None
-        # These are used when in non-threaded mode:
-        self._read_connection = None
-        self._write_connection = None
-        # This is used to track all file connections so they can be closed
-        self._all_file_connections = []
-        self.mode = mode
+        self.cached_table_counts = None
+        if not self.is_mutable:
+            p = Path(path)
+            self.hash = inspect_hash(p)
+            self.cached_size = p.stat().st_size
+            # Maybe use self.ds.inspect_data to populate cached_table_counts
+            if self.ds.inspect_data and self.ds.inspect_data.get(self.name):
+                self.cached_table_counts = {
+                    key: value["count"]
+                    for key, value in self.ds.inspect_data[self.name]["tables"].items()
+                }
 
-    @property
-    def cached_table_counts(self):
-        if self._cached_table_counts is not None:
-            return self._cached_table_counts
-        # Maybe use self.ds.inspect_data to populate cached_table_counts
-        if self.ds.inspect_data and self.ds.inspect_data.get(self.name):
-            self._cached_table_counts = {
-                key: value["count"]
-                for key, value in self.ds.inspect_data[self.name]["tables"].items()
-            }
-        return self._cached_table_counts
-
-    @property
-    def color(self):
-        if self.hash:
-            return self.hash[:6]
-        return md5_not_usedforsecurity(self.name)[:6]
-
-    def suggest_name(self):
-        if self.path:
-            return Path(self.path).stem
-        elif self.memory_name:
-            return self.memory_name
-        else:
-            return "db"
-
-    def connect(self, write=False):
-        extra_kwargs = {}
-        if write:
-            extra_kwargs["isolation_level"] = "IMMEDIATE"
-        if self.memory_name:
-            uri = "file:{}?mode=memory&cache=shared".format(self.memory_name)
-            conn = sqlite3.connect(
-                uri, uri=True, check_same_thread=False, **extra_kwargs
-            )
-            if not write:
-                conn.execute("PRAGMA query_only=1")
-            return conn
-        if self.is_memory:
-            return sqlite3.connect(":memory:", uri=True)
-
-        # mode=ro or immutable=1?
-        if self.is_mutable:
-            qs = "?mode=ro"
-            if self.ds.nolock:
-                qs += "&nolock=1"
-        else:
-            qs = "?immutable=1"
-        assert not (write and not self.is_mutable)
-        if write:
-            qs = ""
-        if self.mode is not None:
-            qs = f"?mode={self.mode}"
-        conn = sqlite3.connect(
-            f"file:{self.path}{qs}", uri=True, check_same_thread=False, **extra_kwargs
-        )
-        self._all_file_connections.append(conn)
-        return conn
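The connect() method relies on SQLite's URI filename syntax: mode=ro opens an existing file read-only, immutable=1 additionally promises the file will never change, and mode=memory&cache=shared lets several connections share one in-memory database. A minimal sketch of the shared-memory case (the database name "shared_demo" is hypothetical):

import sqlite3

# Shared in-memory database: both connections see the same tables
a = sqlite3.connect("file:shared_demo?mode=memory&cache=shared", uri=True)
b = sqlite3.connect("file:shared_demo?mode=memory&cache=shared", uri=True)
a.execute("create table t (id integer)")
print(b.execute("select name from sqlite_master").fetchall())  # [('t',)]

# For files on disk, "file:data.db?mode=ro" opens read-only, while
# "file:data.db?immutable=1" also skips locking, for files that never change.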
-
-    def close(self):
-        # Close all connections - useful to avoid running out of file handles in tests
-        for connection in self._all_file_connections:
-            connection.close()
-
-    async def execute_write(self, sql, params=None, block=True):
-        def _inner(conn):
-            return conn.execute(sql, params or [])
-
-        with trace("sql", database=self.name, sql=sql.strip(), params=params):
-            results = await self.execute_write_fn(_inner, block=block)
-        return results
-
-    async def execute_write_script(self, sql, block=True):
-        def _inner(conn):
-            return conn.executescript(sql)
-
-        with trace("sql", database=self.name, sql=sql.strip(), executescript=True):
-            results = await self.execute_write_fn(
-                _inner, block=block, transaction=False
-            )
-        return results
-
-    async def execute_write_many(self, sql, params_seq, block=True):
-        def _inner(conn):
-            count = 0
-
-            def count_params(params):
-                nonlocal count
-                for param in params:
-                    count += 1
-                    yield param
-
-            return conn.executemany(sql, count_params(params_seq)), count
-
-        with trace(
-            "sql", database=self.name, sql=sql.strip(), executemany=True
-        ) as kwargs:
-            results, count = await self.execute_write_fn(_inner, block=block)
-            kwargs["count"] = count
-        return results
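execute_write_many wraps the parameter sequence in a generator purely so it can report how many rows executemany() actually consumed, since the sequence may itself be a one-shot iterator. The same counting trick in isolation:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("create table docs (id integer, title text)")

count = 0
def count_params(params):
    global count
    for param in params:
        count += 1
        yield param

rows = ((i, f"Title {i}") for i in range(5))  # one-shot iterator
conn.executemany("insert into docs values (?, ?)", count_params(rows))
print(count)  # 5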
-
-    async def execute_isolated_fn(self, fn):
-        # Open a new connection just for the duration of this function
-        # blocking the write queue to avoid any writes occurring during it
-        if self.ds.executor is None:
-            # non-threaded mode
-            isolated_connection = self.connect(write=True)
-            try:
-                result = fn(isolated_connection)
-            finally:
-                isolated_connection.close()
-                try:
-                    self._all_file_connections.remove(isolated_connection)
-                except ValueError:
-                    # Was probably a memory connection
-                    pass
-            return result
-        else:
-            # Threaded mode - send to write thread
-            return await self._send_to_write_thread(fn, isolated_connection=True)
-
-    async def execute_write_fn(self, fn, block=True, transaction=True):
-        if self.ds.executor is None:
-            # non-threaded mode
-            if self._write_connection is None:
-                self._write_connection = self.connect(write=True)
-                self.ds._prepare_connection(self._write_connection, self.name)
-            if transaction:
-                with self._write_connection:
-                    return fn(self._write_connection)
-            else:
-                return fn(self._write_connection)
-        else:
-            return await self._send_to_write_thread(
-                fn, block=block, transaction=transaction
-            )
-
-    async def _send_to_write_thread(
-        self, fn, block=True, isolated_connection=False, transaction=True
-    ):
-        if self._write_queue is None:
-            self._write_queue = queue.Queue()
-        if self._write_thread is None:
-            self._write_thread = threading.Thread(
-                target=self._execute_writes, daemon=True
-            )
-            self._write_thread.name = "_execute_writes for database {}".format(
-                self.name
-            )
-            self._write_thread.start()
-        task_id = uuid.uuid5(uuid.NAMESPACE_DNS, "datasette.io")
-        reply_queue = janus.Queue()
-        self._write_queue.put(
-            WriteTask(fn, task_id, reply_queue, isolated_connection, transaction)
-        )
-        if block:
-            result = await reply_queue.async_q.get()
-            if isinstance(result, Exception):
-                raise result
-            else:
-                return result
-        else:
-            return task_id
-
-    def _execute_writes(self):
-        # Infinite looping thread that protects the single write connection
-        # to this database
-        conn_exception = None
-        conn = None
-        try:
-            conn = self.connect(write=True)
-            self.ds._prepare_connection(conn, self.name)
-        except Exception as e:
-            conn_exception = e
-        while True:
-            task = self._write_queue.get()
-            if conn_exception is not None:
-                result = conn_exception
-            else:
-                if task.isolated_connection:
-                    isolated_connection = self.connect(write=True)
-                    try:
-                        result = task.fn(isolated_connection)
-                    except Exception as e:
-                        sys.stderr.write("{}\n".format(e))
-                        sys.stderr.flush()
-                        result = e
-                    finally:
-                        isolated_connection.close()
-                        try:
-                            self._all_file_connections.remove(isolated_connection)
-                        except ValueError:
-                            # Was probably a memory connection
-                            pass
-                else:
-                    try:
-                        if task.transaction:
-                            with conn:
-                                result = task.fn(conn)
-                        else:
-                            result = task.fn(conn)
-                    except Exception as e:
-                        sys.stderr.write("{}\n".format(e))
-                        sys.stderr.flush()
-                        result = e
-            task.reply_queue.sync_q.put(result)
-
-    async def execute_fn(self, fn):
-        if self.ds.executor is None:
-            # non-threaded mode
-            if self._read_connection is None:
-                self._read_connection = self.connect()
-                self.ds._prepare_connection(self._read_connection, self.name)
-            return fn(self._read_connection)
-
-        # threaded mode
-        def in_thread():
-            conn = getattr(connections, self._thread_local_id, None)
-            if not conn:
-                conn = self.connect()
-                self.ds._prepare_connection(conn, self.name)
-                setattr(connections, self._thread_local_id, conn)
-            return fn(conn)
-
-        return await asyncio.get_event_loop().run_in_executor(
-            self.ds.executor, in_thread
-        )
-
-    async def execute(
-        self,
-        sql,
-        params=None,
-        truncate=False,
-        custom_time_limit=None,
-        page_size=None,
-        log_sql_errors=True,
-    ):
-        """Executes sql against db_name in a thread"""
-        page_size = page_size or self.ds.page_size
-
-        def sql_operation_in_thread(conn):
-            time_limit_ms = self.ds.sql_time_limit_ms
-            if custom_time_limit and custom_time_limit < time_limit_ms:
-                time_limit_ms = custom_time_limit
-
-            with sqlite_timelimit(conn, time_limit_ms):
-                try:
-                    cursor = conn.cursor()
-                    cursor.execute(sql, params if params is not None else {})
-                    max_returned_rows = self.ds.max_returned_rows
-                    if max_returned_rows == page_size:
-                        max_returned_rows += 1
-                    if max_returned_rows and truncate:
-                        rows = cursor.fetchmany(max_returned_rows + 1)
-                        truncated = len(rows) > max_returned_rows
-                        rows = rows[:max_returned_rows]
-                    else:
-                        rows = cursor.fetchall()
-                        truncated = False
-                except (sqlite3.OperationalError, sqlite3.DatabaseError) as e:
-                    if e.args == ("interrupted",):
-                        raise QueryInterrupted(e, sql, params)
-                    if log_sql_errors:
-                        sys.stderr.write(
-                            "ERROR: conn={}, sql = {}, params = {}: {}\n".format(
-                                conn, repr(sql), params, e
-                            )
-                        )
-                        sys.stderr.flush()
-                    raise
-
-            if truncate:
-                return Results(rows, truncated, cursor.description)
-
-            else:
-                return Results(rows, False, cursor.description)
-
-        with trace("sql", database=self.name, sql=sql.strip(), params=params):
-            results = await self.execute_fn(sql_operation_in_thread)
-        return results
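The truncate branch fetches one row more than max_returned_rows so it can tell "exactly the limit" apart from "more rows available" without a second COUNT query. The pattern on its own:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("create table nums (n integer)")
conn.executemany("insert into nums values (?)", [(i,) for i in range(10)])

max_returned_rows = 5
cursor = conn.execute("select n from nums")
rows = cursor.fetchmany(max_returned_rows + 1)
truncated = len(rows) > max_returned_rows
rows = rows[:max_returned_rows]
print(len(rows), truncated)  # 5 True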
-    @property
-    def hash(self):
-        if self.cached_hash is not None:
-            return self.cached_hash
-        elif self.is_mutable or self.is_memory:
-            return None
-        elif self.ds.inspect_data and self.ds.inspect_data.get(self.name):
-            self.cached_hash = self.ds.inspect_data[self.name]["hash"]
-            return self.cached_hash
-        else:
-            p = Path(self.path)
-            self.cached_hash = inspect_hash(p)
-            return self.cached_hash
-
     @property
     def size(self):
+        if self.is_memory:
+            return 0
         if self.cached_size is not None:
             return self.cached_size
-        elif self.is_memory:
-            return 0
-        elif self.is_mutable:
-            return Path(self.path).stat().st_size
-        elif self.ds.inspect_data and self.ds.inspect_data.get(self.name):
-            self.cached_size = self.ds.inspect_data[self.name]["size"]
-            return self.cached_size
         else:
-            self.cached_size = Path(self.path).stat().st_size
-            return self.cached_size
+            return Path(self.path).stat().st_size
 
     async def table_counts(self, limit=10):
         if not self.is_mutable and self.cached_table_counts is not None:

@@ -384,225 +50,107 @@ class Database:
         for table in await self.table_names():
             try:
                 table_count = (
-                    await self.execute(
-                        f"select count(*) from (select * from [{table}] limit {self.count_limit + 1})",
+                    await self.ds.execute(
+                        self.name,
+                        "select count(*) from [{}]".format(table),
                         custom_time_limit=limit,
                     )
                 ).rows[0][0]
                 counts[table] = table_count
             # In some cases I saw "SQL Logic Error" here in addition to
             # QueryInterrupted - so we catch that too:
-            except (QueryInterrupted, sqlite3.OperationalError, sqlite3.DatabaseError):
+            except (QueryInterrupted, sqlite3.OperationalError):
                 counts[table] = None
         if not self.is_mutable:
-            self._cached_table_counts = counts
+            self.cached_table_counts = counts
         return counts
 
     @property
     def mtime_ns(self):
-        if self.is_memory:
-            return None
         return Path(self.path).stat().st_mtime_ns
 
-    async def attached_databases(self):
-        # This used to be:
-        # select seq, name, file from pragma_database_list() where seq > 0
-        # But SQLite prior to 3.16.0 doesn't support pragma functions
-        results = await self.execute("PRAGMA database_list;")
-        # {'seq': 0, 'name': 'main', 'file': ''}
-        return [
-            AttachedDatabase(*row)
-            for row in results.rows
-            # Filter out the SQLite internal "temp" database, refs #2557
-            if row["seq"] > 0 and row["name"] != "temp"
-        ]
+    @property
+    def name(self):
+        if self.is_memory:
+            return ":memory:"
+        else:
+            return Path(self.path).stem
 
     async def table_exists(self, table):
-        results = await self.execute(
-            "select 1 from sqlite_master where type='table' and name=?", params=(table,)
-        )
-        return bool(results.rows)
-
-    async def view_exists(self, table):
-        results = await self.execute(
-            "select 1 from sqlite_master where type='view' and name=?", params=(table,)
+        results = await self.ds.execute(
+            self.name,
+            "select 1 from sqlite_master where type='table' and name=?",
+            params=(table,),
         )
         return bool(results.rows)
 
     async def table_names(self):
-        results = await self.execute(
-            "select name from sqlite_master where type='table'"
+        results = await self.ds.execute(
+            self.name, "select name from sqlite_master where type='table'"
         )
         return [r[0] for r in results.rows]
 
     async def table_columns(self, table):
-        return await self.execute_fn(lambda conn: table_columns(conn, table))
-
-    async def table_column_details(self, table):
-        return await self.execute_fn(lambda conn: table_column_details(conn, table))
+        return await self.ds.execute_against_connection_in_thread(
+            self.name, lambda conn: table_columns(conn, table)
+        )
 
     async def primary_keys(self, table):
-        return await self.execute_fn(lambda conn: detect_primary_keys(conn, table))
+        return await self.ds.execute_against_connection_in_thread(
+            self.name, lambda conn: detect_primary_keys(conn, table)
+        )
 
     async def fts_table(self, table):
-        return await self.execute_fn(lambda conn: detect_fts(conn, table))
+        return await self.ds.execute_against_connection_in_thread(
+            self.name, lambda conn: detect_fts(conn, table)
+        )
 
     async def label_column_for_table(self, table):
-        explicit_label_column = (await self.ds.table_config(self.name, table)).get(
+        explicit_label_column = self.ds.table_metadata(self.name, table).get(
             "label_column"
         )
         if explicit_label_column:
             return explicit_label_column
-
-        def column_details(conn):
-            # Returns {column_name: (type, is_unique)}
-            db = sqlite_utils.Database(conn)
-            columns = db[table].columns_dict
-            indexes = db[table].indexes
-            details = {}
-            for name in columns:
-                is_unique = any(
-                    index
-                    for index in indexes
-                    if index.columns == [name] and index.unique
-                )
-                details[name] = (columns[name], is_unique)
-            return details
-
-        column_details = await self.execute_fn(column_details)
-        # Is there just one unique column that's text?
-        unique_text_columns = [
-            name
-            for name, (type_, is_unique) in column_details.items()
-            if is_unique and type_ is str
-        ]
-        if len(unique_text_columns) == 1:
-            return unique_text_columns[0]
-
-        column_names = list(column_details.keys())
+        # If a table has two columns, one of which is ID, then label_column is the other one
+        column_names = await self.ds.execute_against_connection_in_thread(
+            self.name, lambda conn: table_columns(conn, table)
+        )
         # Is there a name or title column?
-        name_or_title = [c for c in column_names if c.lower() in ("name", "title")]
+        name_or_title = [c for c in column_names if c in ("name", "title")]
        if name_or_title:
            return name_or_title[0]
-        # If a table has two columns, one of which is ID, then label_column is the other one
        if (
            column_names
            and len(column_names) == 2
            and ("id" in column_names or "pk" in column_names)
-            and not set(column_names) == {"id", "pk"}
        ):
            return [c for c in column_names if c not in ("id", "pk")][0]
        # Couldn't find a label:
        return None
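The label column heuristic can be tried out directly: a name or title column wins, otherwise a two-column table whose other column is id or pk labels rows with the non-id column. A standalone sketch with hypothetical column lists:

def pick_label_column(column_names):
    name_or_title = [c for c in column_names if c.lower() in ("name", "title")]
    if name_or_title:
        return name_or_title[0]
    if (
        len(column_names) == 2
        and ("id" in column_names or "pk" in column_names)
        and set(column_names) != {"id", "pk"}
    ):
        return [c for c in column_names if c not in ("id", "pk")][0]
    return None

print(pick_label_column(["id", "title", "body"]))  # title
print(pick_label_column(["id", "species"]))        # species
print(pick_label_column(["id", "pk"]))             # None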
     async def foreign_keys_for_table(self, table):
-        return await self.execute_fn(
-            lambda conn: get_outbound_foreign_keys(conn, table)
+        return await self.ds.execute_against_connection_in_thread(
+            self.name, lambda conn: get_outbound_foreign_keys(conn, table)
         )
 
     async def hidden_table_names(self):
-        hidden_tables = []
-        # Add any tables marked as hidden in config
-        db_config = self.ds.config.get("databases", {}).get(self.name, {})
-        if "tables" in db_config:
-            hidden_tables += [
-                t for t in db_config["tables"] if db_config["tables"][t].get("hidden")
-            ]
-
-        if sqlite_version()[1] >= 37:
-            hidden_tables += [
-                x[0]
-                for x in await self.execute(
-                    """
-                    with shadow_tables as (
-                      select name
-                      from pragma_table_list
-                      where [type] = 'shadow'
-                      order by name
-                    ),
-                    core_tables as (
-                      select name
-                      from sqlite_master
-                      WHERE name in ('sqlite_stat1', 'sqlite_stat2', 'sqlite_stat3', 'sqlite_stat4')
-                      OR substr(name, 1, 1) == '_'
-                    ),
-                    combined as (
-                      select name from shadow_tables
-                      union all
-                      select name from core_tables
-                    )
-                    select name from combined order by 1
-                    """
-                )
-            ]
-        else:
-            hidden_tables += [
-                x[0]
-                for x in await self.execute(
-                    """
-                    WITH base AS (
-                      SELECT name
-                      FROM sqlite_master
-                      WHERE name IN ('sqlite_stat1', 'sqlite_stat2', 'sqlite_stat3', 'sqlite_stat4')
-                      OR substr(name, 1, 1) == '_'
-                    ),
-                    fts_suffixes AS (
-                      SELECT column1 AS suffix
-                      FROM (VALUES ('_data'), ('_idx'), ('_docsize'), ('_content'), ('_config'))
-                    ),
-                    fts5_names AS (
-                      SELECT name
-                      FROM sqlite_master
-                      WHERE sql LIKE '%VIRTUAL TABLE%USING FTS%'
-                    ),
-                    fts5_shadow_tables AS (
-                      SELECT
-                        printf('%s%s', fts5_names.name, fts_suffixes.suffix) AS name
-                      FROM fts5_names
-                      JOIN fts_suffixes
-                    ),
-                    fts3_suffixes AS (
-                      SELECT column1 AS suffix
-                      FROM (VALUES ('_content'), ('_segdir'), ('_segments'), ('_stat'), ('_docsize'))
-                    ),
-                    fts3_names AS (
-                      SELECT name
-                      FROM sqlite_master
-                      WHERE sql LIKE '%VIRTUAL TABLE%USING FTS3%'
-                      OR sql LIKE '%VIRTUAL TABLE%USING FTS4%'
-                    ),
-                    fts3_shadow_tables AS (
-                      SELECT
-                        printf('%s%s', fts3_names.name, fts3_suffixes.suffix) AS name
-                      FROM fts3_names
-                      JOIN fts3_suffixes
-                    ),
-                    final AS (
-                      SELECT name FROM base
-                      UNION ALL
-                      SELECT name FROM fts5_shadow_tables
-                      UNION ALL
-                      SELECT name FROM fts3_shadow_tables
-                    )
-                    SELECT name FROM final ORDER BY 1
-                    """
-                )
-            ]
-        # Also hide any FTS tables that have a content= argument
-        hidden_tables += [
-            x[0]
-            for x in await self.execute(
-                """
-                SELECT name
-                FROM sqlite_master
-                WHERE sql LIKE '%VIRTUAL TABLE%'
-                AND sql LIKE '%USING FTS%'
-                AND sql LIKE '%content=%'
-                """
-            )
-        ]
-
-        has_spatialite = await self.execute_fn(detect_spatialite)
+        # Mark tables 'hidden' if they relate to FTS virtual tables
+        hidden_tables = [
+            r[0]
+            for r in (
+                await self.ds.execute(
+                    self.name,
+                    """
+                    select name from sqlite_master
+                    where rootpage = 0
+                    and sql like '%VIRTUAL TABLE%USING FTS%'
+                    """,
+                )
+            ).rows
+        ]
+        has_spatialite = await self.ds.execute_against_connection_in_thread(
+            self.name, detect_spatialite
+        )
         if has_spatialite:
             # Also hide Spatialite internal tables
             hidden_tables += [

@@ -615,51 +163,64 @@ class Database:
                 "sqlite_sequence",
                 "views_geometry_columns",
                 "virts_geometry_columns",
-                "data_licenses",
-                "KNN",
-                "KNN2",
             ] + [
                 r[0]
                 for r in (
-                    await self.execute(
+                    await self.ds.execute(
+                        self.name,
                         """
                         select name from sqlite_master
                         where name like "idx_%"
                         and type = "table"
-                        """
+                        """,
                     )
                 ).rows
             ]
+        # Add any from metadata.json
+        db_metadata = self.ds.metadata(database=self.name)
+        if "tables" in db_metadata:
+            hidden_tables += [
+                t
+                for t in db_metadata["tables"]
+                if db_metadata["tables"][t].get("hidden")
+            ]
+        # Also mark as hidden any tables which start with the name of a hidden table
+        # e.g. "searchable_fts" implies "searchable_fts_content" should be hidden
+        for table_name in await self.table_names():
+            for hidden_table in hidden_tables[:]:
+                if table_name.startswith(hidden_table):
+                    hidden_tables.append(table_name)
+                    continue
 
         return hidden_tables
 
     async def view_names(self):
-        results = await self.execute("select name from sqlite_master where type='view'")
+        results = await self.ds.execute(
+            self.name, "select name from sqlite_master where type='view'"
+        )
         return [r[0] for r in results.rows]
 
     async def get_all_foreign_keys(self):
-        return await self.execute_fn(get_all_foreign_keys)
+        return await self.ds.execute_against_connection_in_thread(
+            self.name, get_all_foreign_keys
+        )
+
+    async def get_outbound_foreign_keys(self, table):
+        return await self.ds.execute_against_connection_in_thread(
+            self.name, lambda conn: get_outbound_foreign_keys(conn, table)
+        )
 
     async def get_table_definition(self, table, type_="table"):
         table_definition_rows = list(
-            await self.execute(
+            await self.ds.execute(
+                self.name,
                 "select sql from sqlite_master where name = :n and type=:t",
                 {"n": table, "t": type_},
             )
         )
         if not table_definition_rows:
             return None
-        bits = [table_definition_rows[0][0] + ";"]
-        # Add on any indexes
-        index_rows = list(
-            await self.execute(
-                "select sql from sqlite_master where tbl_name = :n and type='index' and sql is not null",
-                {"n": table},
-            )
-        )
-        for index_row in index_rows:
-            bits.append(index_row[0] + ";")
-        return "\n".join(bits)
+        return table_definition_rows[0][0]
 
     async def get_view_definition(self, view):
         return await self.get_table_definition(view, "view")
 
@@ -671,67 +232,10 @@ class Database:
         if self.is_memory:
             tags.append("memory")
         if self.hash:
-            tags.append(f"hash={self.hash}")
+            tags.append("hash={}".format(self.hash))
         if self.size is not None:
-            tags.append(f"size={self.size}")
+            tags.append("size={}".format(self.size))
         tags_str = ""
         if tags:
-            tags_str = f" ({', '.join(tags)})"
-        return f"<Database: {self.name}{tags_str}>"
+            tags_str = " ({})".format(", ".join(tags))
+        return "<Database: {}{}>".format(self.name, tags_str)
 
 
-class WriteTask:
-    __slots__ = ("fn", "task_id", "reply_queue", "isolated_connection", "transaction")
-
-    def __init__(self, fn, task_id, reply_queue, isolated_connection, transaction):
-        self.fn = fn
-        self.task_id = task_id
-        self.reply_queue = reply_queue
-        self.isolated_connection = isolated_connection
-        self.transaction = transaction
-
-
-class QueryInterrupted(Exception):
-    def __init__(self, e, sql, params):
-        self.e = e
-        self.sql = sql
-        self.params = params
-
-    def __str__(self):
-        return "QueryInterrupted: {}".format(self.e)
-
-
-class MultipleValues(Exception):
-    pass
-
-
-class Results:
-    def __init__(self, rows, truncated, description):
-        self.rows = rows
-        self.truncated = truncated
-        self.description = description
-
-    @property
-    def columns(self):
-        return [d[0] for d in self.description]
-
-    def first(self):
-        if self.rows:
-            return self.rows[0]
-        else:
-            return None
-
-    def single_value(self):
-        if self.rows and 1 == len(self.rows) and 1 == len(self.rows[0]):
-            return self.rows[0][0]
-        else:
-            raise MultipleValues
-
-    def dicts(self):
-        return [dict(row) for row in self.rows]
-
-    def __iter__(self):
-        return iter(self.rows)
-
-    def __len__(self):
-        return len(self.rows)
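Results is a thin wrapper over a fetched row list plus the cursor description. A sketch of how its helpers behave, using sqlite3 rows directly (Datasette sets row_factory to sqlite3.Row, which is what makes dicts() possible):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.row_factory = sqlite3.Row
cursor = conn.execute("select 1 as id, 'hello' as title")
rows = cursor.fetchall()

# Mirroring Results(rows, truncated, cursor.description):
columns = [d[0] for d in cursor.description]  # ['id', 'title']
first = rows[0] if rows else None
dicts = [dict(row) for row in rows]           # [{'id': 1, 'title': 'hello'}]
print(columns, dict(first), dicts)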
 
@@ -1,101 +0,0 @@
-from datasette import hookimpl
-from datasette.permissions import Action
-from datasette.resources import (
-    DatabaseResource,
-    TableResource,
-    QueryResource,
-)
-
-
-@hookimpl
-def register_actions():
-    """Register the core Datasette actions."""
-    return (
-        # Global actions (no resource_class)
-        Action(
-            name="view-instance",
-            abbr="vi",
-            description="View Datasette instance",
-        ),
-        Action(
-            name="permissions-debug",
-            abbr="pd",
-            description="Access permission debug tool",
-        ),
-        Action(
-            name="debug-menu",
-            abbr="dm",
-            description="View debug menu items",
-        ),
-        # Database-level actions (parent-level)
-        Action(
-            name="view-database",
-            abbr="vd",
-            description="View database",
-            resource_class=DatabaseResource,
-        ),
-        Action(
-            name="view-database-download",
-            abbr="vdd",
-            description="Download database file",
-            resource_class=DatabaseResource,
-            also_requires="view-database",
-        ),
-        Action(
-            name="execute-sql",
-            abbr="es",
-            description="Execute read-only SQL queries",
-            resource_class=DatabaseResource,
-            also_requires="view-database",
-        ),
-        Action(
-            name="create-table",
-            abbr="ct",
-            description="Create tables",
-            resource_class=DatabaseResource,
-        ),
-        # Table-level actions (child-level)
-        Action(
-            name="view-table",
-            abbr="vt",
-            description="View table",
-            resource_class=TableResource,
-        ),
-        Action(
-            name="insert-row",
-            abbr="ir",
-            description="Insert rows",
-            resource_class=TableResource,
-        ),
-        Action(
-            name="delete-row",
-            abbr="dr",
-            description="Delete rows",
-            resource_class=TableResource,
-        ),
-        Action(
-            name="update-row",
-            abbr="ur",
-            description="Update rows",
-            resource_class=TableResource,
-        ),
-        Action(
-            name="alter-table",
-            abbr="at",
-            description="Alter tables",
-            resource_class=TableResource,
-        ),
-        Action(
-            name="drop-table",
-            abbr="dt",
-            description="Drop tables",
-            resource_class=TableResource,
-        ),
-        # Query-level actions (child-level)
-        Action(
-            name="view-query",
-            abbr="vq",
-            description="View named query results",
-            resource_class=QueryResource,
-        ),
-    )
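Plugins can feed the same registry. A hedged sketch of what a third-party register_actions implementation might look like, reusing the Action and TableResource classes from the file above; the action name, abbreviation and feature are hypothetical:

from datasette import hookimpl
from datasette.permissions import Action
from datasette.resources import TableResource


@hookimpl
def register_actions():
    # Hypothetical plugin-defined action gating a per-table export feature
    return [
        Action(
            name="export-table",
            abbr="et",
            description="Export table as an archive",
            resource_class=TableResource,
        )
    ]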
|
|
@ -1,57 +0,0 @@
|
||||||
from datasette import hookimpl
|
|
||||||
import datetime
|
|
||||||
import os
|
|
||||||
import time
|
|
||||||
|
|
||||||
|
|
||||||
def header(key, request):
|
|
||||||
key = key.replace("_", "-").encode("utf-8")
|
|
||||||
headers_dict = dict(request.scope["headers"])
|
|
||||||
return headers_dict.get(key, b"").decode("utf-8")
|
|
||||||
|
|
||||||
|
|
||||||
def actor(key, request):
|
|
||||||
if request.actor is None:
|
|
||||||
raise KeyError
|
|
||||||
return request.actor[key]
|
|
||||||
|
|
||||||
|
|
||||||
def cookie(key, request):
|
|
||||||
return request.cookies[key]
|
|
||||||
|
|
||||||
|
|
||||||
def now(key, request):
|
|
||||||
if key == "epoch":
|
|
||||||
return int(time.time())
|
|
||||||
elif key == "date_utc":
|
|
||||||
return datetime.datetime.now(datetime.timezone.utc).date().isoformat()
|
|
||||||
elif key == "datetime_utc":
|
|
||||||
return (
|
|
||||||
datetime.datetime.now(datetime.timezone.utc).strftime(r"%Y-%m-%dT%H:%M:%S")
|
|
||||||
+ "Z"
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
raise KeyError
|
|
||||||
|
|
||||||
|
|
||||||
def random(key, request):
|
|
||||||
if key.startswith("chars_") and key.split("chars_")[-1].isdigit():
|
|
||||||
num_chars = int(key.split("chars_")[-1])
|
|
||||||
if num_chars % 2 == 1:
|
|
||||||
urandom_len = (num_chars + 1) / 2
|
|
||||||
else:
|
|
||||||
urandom_len = num_chars / 2
|
|
||||||
return os.urandom(int(urandom_len)).hex()[:num_chars]
|
|
||||||
else:
|
|
||||||
raise KeyError
|
|
||||||
|
|
||||||
|
|
||||||
@hookimpl
|
|
||||||
def register_magic_parameters():
|
|
||||||
return [
|
|
||||||
("header", header),
|
|
||||||
("actor", actor),
|
|
||||||
("cookie", cookie),
|
|
||||||
("now", now),
|
|
||||||
("random", random),
|
|
||||||
]
|
|
||||||
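Each registered pair maps a prefix to a resolver, so a canned query can reference values such as :_now_epoch, :_actor_id or :_random_chars_16 without the caller supplying them. The random resolver's length arithmetic works because os.urandom yields two hex characters per byte, so it rounds the byte count up for odd lengths; the same logic in isolation:

import os

def random_chars(num_chars):
    # os.urandom returns bytes; two hex characters per byte, so round up
    urandom_len = (num_chars + 1) // 2
    return os.urandom(urandom_len).hex()[:num_chars]

print(len(random_chars(7)))   # 7
print(len(random_chars(16)))  # 16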
 
@@ -1,41 +0,0 @@
-from datasette import hookimpl
-
-
-@hookimpl
-def menu_links(datasette, actor):
-    async def inner():
-        if not await datasette.allowed(action="debug-menu", actor=actor):
-            return []
-
-        return [
-            {"href": datasette.urls.path("/-/databases"), "label": "Databases"},
-            {
-                "href": datasette.urls.path("/-/plugins"),
-                "label": "Installed plugins",
-            },
-            {
-                "href": datasette.urls.path("/-/versions"),
-                "label": "Version info",
-            },
-            {
-                "href": datasette.urls.path("/-/settings"),
-                "label": "Settings",
-            },
-            {
-                "href": datasette.urls.path("/-/permissions"),
-                "label": "Debug permissions",
-            },
-            {
-                "href": datasette.urls.path("/-/messages"),
-                "label": "Debug messages",
-            },
-            {
-                "href": datasette.urls.path("/-/allow-debug"),
-                "label": "Debug allow rules",
-            },
-            {"href": datasette.urls.path("/-/threads"), "label": "Debug threads"},
-            {"href": datasette.urls.path("/-/actor"), "label": "Debug actor"},
-            {"href": datasette.urls.path("/-/patterns"), "label": "Pattern portfolio"},
-        ]
-
-    return inner
 
@@ -1,59 +0,0 @@
-"""
-Default permission implementations for Datasette.
-
-This module provides the built-in permission checking logic through implementations
-of the permission_resources_sql hook. The hooks are organized by their purpose:
-
-1. Actor Restrictions - Enforces _r allowlists embedded in actor tokens
-2. Root User - Grants full access when --root flag is used
-3. Config Rules - Applies permissions from datasette.yaml
-4. Default Settings - Enforces default_allow_sql and default view permissions
-
-IMPORTANT: These hooks return PermissionSQL objects that are combined using SQL
-UNION/INTERSECT operations. The order of evaluation is:
-- restriction_sql fields are INTERSECTed (all must match)
-- Regular sql fields are UNIONed and evaluated with cascading priority
-"""
-
-from __future__ import annotations
-
-from typing import TYPE_CHECKING, Optional
-
-if TYPE_CHECKING:
-    from datasette.app import Datasette
-
-from datasette import hookimpl
-
-# Re-export all hooks and public utilities
-from .restrictions import (
-    actor_restrictions_sql,
-    restrictions_allow_action,
-    ActorRestrictions,
-)
-from .root import root_user_permissions_sql
-from .config import config_permissions_sql
-from .defaults import (
-    default_allow_sql_check,
-    default_action_permissions_sql,
-    DEFAULT_ALLOW_ACTIONS,
-)
-from .tokens import actor_from_signed_api_token
-
-
-@hookimpl
-def skip_csrf(scope) -> Optional[bool]:
-    """Skip CSRF check for JSON content-type requests."""
-    if scope["type"] == "http":
-        headers = scope.get("headers") or {}
-        if dict(headers).get(b"content-type") == b"application/json":
-            return True
-    return None
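skip_csrf works on the raw ASGI scope, where headers arrive as a sequence of (name, value) byte pairs; converting to a dict makes the content-type lookup cheap. A minimal sketch with a hand-built scope:

scope = {
    "type": "http",
    "headers": [
        (b"host", b"localhost:8001"),
        (b"content-type", b"application/json"),
    ],
}

headers = dict(scope.get("headers") or {})
skip = headers.get(b"content-type") == b"application/json"
print(skip)  # True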
-
-
-@hookimpl
-def canned_queries(datasette: "Datasette", database: str, actor) -> dict:
-    """Return canned queries defined in datasette.yaml configuration."""
-    queries = (
-        ((datasette.config or {}).get("databases") or {}).get(database) or {}
-    ).get("queries") or {}
-    return queries
 
@@ -1,442 +0,0 @@
-"""
-Config-based permission handling for Datasette.
-
-Applies permission rules from datasette.yaml configuration.
-"""
-
-from __future__ import annotations
-
-from typing import TYPE_CHECKING, Any, List, Optional, Set, Tuple
-
-if TYPE_CHECKING:
-    from datasette.app import Datasette
-
-from datasette import hookimpl
-from datasette.permissions import PermissionSQL
-from datasette.utils import actor_matches_allow
-
-from .helpers import PermissionRowCollector, get_action_name_variants
-
-
-class ConfigPermissionProcessor:
-    """
-    Processes permission rules from datasette.yaml configuration.
-
-    Configuration structure:
-
-    permissions:              # Root-level permissions block
-        view-instance:
-            id: admin
-
-    databases:
-        mydb:
-            permissions:      # Database-level permissions
-                view-database:
-                    id: admin
-            allow:            # Database-level allow block (for view-*)
-                id: viewer
-            allow_sql:        # execute-sql allow block
-                id: analyst
-            tables:
-                users:
-                    permissions:  # Table-level permissions
-                        view-table:
-                            id: admin
-                    allow:        # Table-level allow block
-                        id: viewer
-            queries:
-                my_query:
-                    permissions:  # Query-level permissions
-                        view-query:
-                            id: admin
-                    allow:        # Query-level allow block
-                        id: viewer
-    """
-
-    def __init__(
-        self,
-        datasette: "Datasette",
-        actor: Optional[dict],
-        action: str,
-    ):
-        self.datasette = datasette
-        self.actor = actor
-        self.action = action
-        self.config = datasette.config or {}
-        self.collector = PermissionRowCollector(prefix="cfg")
-
-        # Pre-compute action variants
-        self.action_checks = get_action_name_variants(datasette, action)
-        self.action_obj = datasette.actions.get(action)
-
-        # Parse restrictions if present
-        self.has_restrictions = actor and "_r" in actor if actor else False
-        self.restrictions = actor.get("_r", {}) if actor else {}
-
-        # Pre-compute restriction info for efficiency
-        self.restricted_databases: Set[str] = set()
-        self.restricted_tables: Set[Tuple[str, str]] = set()
-
-        if self.has_restrictions:
-            self.restricted_databases = {
-                db_name
-                for db_name, db_actions in (self.restrictions.get("d") or {}).items()
-                if self.action_checks.intersection(db_actions)
-            }
-            self.restricted_tables = {
-                (db_name, table_name)
-                for db_name, tables in (self.restrictions.get("r") or {}).items()
-                for table_name, table_actions in tables.items()
-                if self.action_checks.intersection(table_actions)
-            }
-            # Tables implicitly reference their parent databases
-            self.restricted_databases.update(db for db, _ in self.restricted_tables)
-
-    def evaluate_allow_block(self, allow_block: Any) -> Optional[bool]:
-        """Evaluate an allow block against the current actor."""
-        if allow_block is None:
-            return None
-        return actor_matches_allow(self.actor, allow_block)
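Allow blocks follow Datasette's standard allow-block semantics: each key names an actor field and matches either a single value or a list of values. A quick illustration using datasette.utils.actor_matches_allow, imported above:

from datasette.utils import actor_matches_allow

allow = {"id": ["admin", "sam"]}
print(actor_matches_allow({"id": "admin"}, allow))  # True
print(actor_matches_allow({"id": "guest"}, allow))  # False
print(actor_matches_allow(None, allow))             # False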
-
-    def is_in_restriction_allowlist(
-        self,
-        parent: Optional[str],
-        child: Optional[str],
-    ) -> bool:
-        """Check if resource is allowed by actor restrictions."""
-        if not self.has_restrictions:
-            return True  # No restrictions, all resources allowed
-
-        # Check global allowlist
-        if self.action_checks.intersection(self.restrictions.get("a", [])):
-            return True
-
-        # Check database-level allowlist
-        if parent and self.action_checks.intersection(
-            self.restrictions.get("d", {}).get(parent, [])
-        ):
-            return True
-
-        # Check table-level allowlist
-        if parent:
-            table_restrictions = (self.restrictions.get("r", {}) or {}).get(parent, {})
-            if child:
-                table_actions = table_restrictions.get(child, [])
-                if self.action_checks.intersection(table_actions):
-                    return True
-            else:
-                # Parent query should proceed if any child in this database is allowlisted
-                for table_actions in table_restrictions.values():
-                    if self.action_checks.intersection(table_actions):
-                        return True
-
-        # Parent/child both None: include if any restrictions exist for this action
-        if parent is None and child is None:
-            if self.action_checks.intersection(self.restrictions.get("a", [])):
-                return True
-            if self.restricted_databases:
-                return True
-            if self.restricted_tables:
-                return True
-
-        return False
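The _r block this method consults is the restrictions dictionary embedded in signed tokens: "a" holds instance-wide allowed actions, "d" maps database names to actions, and "r" maps database then table/query names to actions. A sketch of the shape (the database and table names are hypothetical):

actor = {
    "id": "root",
    "_r": {
        "a": ["view-instance"],                      # allowed everywhere
        "d": {"mydb": ["execute-sql"]},              # allowed on one database
        "r": {"mydb": {"mytable": ["insert-row"]}},  # allowed on one table
    },
}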
-
-    def add_permissions_rule(
-        self,
-        parent: Optional[str],
-        child: Optional[str],
-        permissions_block: Optional[dict],
-        scope_desc: str,
-    ) -> None:
-        """Add a rule from a permissions:{action} block."""
-        if permissions_block is None:
-            return
-
-        action_allow_block = permissions_block.get(self.action)
-        result = self.evaluate_allow_block(action_allow_block)
-
-        self.collector.add(
-            parent=parent,
-            child=child,
-            allow=result,
-            reason=f"config {'allow' if result else 'deny'} {scope_desc}",
-            if_not_none=True,
-        )
-
-    def add_allow_block_rule(
-        self,
-        parent: Optional[str],
-        child: Optional[str],
-        allow_block: Any,
-        scope_desc: str,
-    ) -> None:
-        """
-        Add rules from an allow:{} block.
-
-        For allow blocks, if the block exists but doesn't match the actor,
-        this is treated as a deny. We also handle the restriction-gate logic.
-        """
-        if allow_block is None:
-            return
-
-        # Skip if resource is not in restriction allowlist
-        if not self.is_in_restriction_allowlist(parent, child):
-            return
-
-        result = self.evaluate_allow_block(allow_block)
-        bool_result = bool(result)
-
-        self.collector.add(
-            parent,
-            child,
-            bool_result,
-            f"config {'allow' if result else 'deny'} {scope_desc}",
-        )
-
-        # Handle restriction-gate: add explicit denies for restricted resources
-        self._add_restriction_gate_denies(parent, child, bool_result, scope_desc)
-
-    def _add_restriction_gate_denies(
-        self,
-        parent: Optional[str],
-        child: Optional[str],
-        is_allowed: bool,
-        scope_desc: str,
-    ) -> None:
-        """
-        When a config rule denies at a higher level, add explicit denies
-        for restricted resources to prevent child-level allows from
-        incorrectly granting access.
-        """
-        if is_allowed or child is not None or not self.has_restrictions:
-            return
-
-        if not self.action_obj:
-            return
-
-        reason = f"config deny {scope_desc} (restriction gate)"
-
-        if parent is None:
-            # Root-level deny: add denies for all restricted resources
-            if self.action_obj.takes_parent:
-                for db_name in self.restricted_databases:
-                    self.collector.add(db_name, None, False, reason)
-            if self.action_obj.takes_child:
-                for db_name, table_name in self.restricted_tables:
-                    self.collector.add(db_name, table_name, False, reason)
-        else:
-            # Database-level deny: add denies for tables in that database
-            if self.action_obj.takes_child:
-                for db_name, table_name in self.restricted_tables:
-                    if db_name == parent:
-                        self.collector.add(db_name, table_name, False, reason)
-
-    def process(self) -> Optional[PermissionSQL]:
-        """Process all config rules and return combined PermissionSQL."""
-        self._process_root_permissions()
-        self._process_databases()
-        self._process_root_allow_blocks()
-
-        return self.collector.to_permission_sql()
-
-    def _process_root_permissions(self) -> None:
-        """Process root-level permissions block."""
-        root_perms = self.config.get("permissions") or {}
-        self.add_permissions_rule(
-            None,
-            None,
-            root_perms,
-            f"permissions for {self.action}",
-        )
-
-    def _process_databases(self) -> None:
-        """Process database-level and nested configurations."""
-        databases = self.config.get("databases") or {}
-
-        for db_name, db_config in databases.items():
-            self._process_database(db_name, db_config or {})
-
-    def _process_database(self, db_name: str, db_config: dict) -> None:
-        """Process a single database's configuration."""
-        # Database-level permissions block
-        db_perms = db_config.get("permissions") or {}
-        self.add_permissions_rule(
-            db_name,
-            None,
-            db_perms,
-            f"permissions for {self.action} on {db_name}",
-        )
-
-        # Process tables
-        for table_name, table_config in (db_config.get("tables") or {}).items():
-            self._process_table(db_name, table_name, table_config or {})
-
-        # Process queries
-        for query_name, query_config in (db_config.get("queries") or {}).items():
-            self._process_query(db_name, query_name, query_config)
-
-        # Database-level allow blocks
-        self._process_database_allow_blocks(db_name, db_config)
-
-    def _process_table(
-        self,
-        db_name: str,
-        table_name: str,
-        table_config: dict,
-    ) -> None:
-        """Process a single table's configuration."""
-        # Table-level permissions block
-        table_perms = table_config.get("permissions") or {}
-        self.add_permissions_rule(
-            db_name,
-            table_name,
-            table_perms,
-            f"permissions for {self.action} on {db_name}/{table_name}",
-        )
-
-        # Table-level allow block (for view-table)
-        if self.action == "view-table":
-            self.add_allow_block_rule(
-                db_name,
-                table_name,
-                table_config.get("allow"),
-                f"allow for {self.action} on {db_name}/{table_name}",
-            )
-
-    def _process_query(
-        self,
-        db_name: str,
-        query_name: str,
-        query_config: Any,
-    ) -> None:
-        """Process a single query's configuration."""
-        # Query config can be a string (just SQL) or dict
-        if not isinstance(query_config, dict):
-            return
-
-        # Query-level permissions block
-        query_perms = query_config.get("permissions") or {}
-        self.add_permissions_rule(
-            db_name,
-            query_name,
-            query_perms,
-            f"permissions for {self.action} on {db_name}/{query_name}",
-        )
-
-        # Query-level allow block (for view-query)
-        if self.action == "view-query":
-            self.add_allow_block_rule(
-                db_name,
-                query_name,
-                query_config.get("allow"),
-                f"allow for {self.action} on {db_name}/{query_name}",
-            )
-
-    def _process_database_allow_blocks(
-        self,
-        db_name: str,
-        db_config: dict,
-    ) -> None:
-        """Process database-level allow/allow_sql blocks."""
-        # view-database allow block
-        if self.action == "view-database":
-            self.add_allow_block_rule(
-                db_name,
-                None,
-                db_config.get("allow"),
-                f"allow for {self.action} on {db_name}",
-            )
-
-        # execute-sql allow_sql block
-        if self.action == "execute-sql":
-            self.add_allow_block_rule(
-                db_name,
-                None,
-                db_config.get("allow_sql"),
-                f"allow_sql for {db_name}",
-            )
-
-        # view-table uses database-level allow for inheritance
-        if self.action == "view-table":
|
|
||||||
self.add_allow_block_rule(
|
|
||||||
db_name,
|
|
||||||
None,
|
|
||||||
db_config.get("allow"),
|
|
||||||
f"allow for {self.action} on {db_name}",
|
|
||||||
)
|
|
||||||
|
|
||||||
# view-query uses database-level allow for inheritance
|
|
||||||
if self.action == "view-query":
|
|
||||||
self.add_allow_block_rule(
|
|
||||||
db_name,
|
|
||||||
None,
|
|
||||||
db_config.get("allow"),
|
|
||||||
f"allow for {self.action} on {db_name}",
|
|
||||||
)
|
|
||||||
|
|
||||||
def _process_root_allow_blocks(self) -> None:
|
|
||||||
"""Process root-level allow/allow_sql blocks."""
|
|
||||||
root_allow = self.config.get("allow")
|
|
||||||
|
|
||||||
if self.action == "view-instance":
|
|
||||||
self.add_allow_block_rule(
|
|
||||||
None,
|
|
||||||
None,
|
|
||||||
root_allow,
|
|
||||||
"allow for view-instance",
|
|
||||||
)
|
|
||||||
|
|
||||||
if self.action == "view-database":
|
|
||||||
self.add_allow_block_rule(
|
|
||||||
None,
|
|
||||||
None,
|
|
||||||
root_allow,
|
|
||||||
"allow for view-database",
|
|
||||||
)
|
|
||||||
|
|
||||||
if self.action == "view-table":
|
|
||||||
self.add_allow_block_rule(
|
|
||||||
None,
|
|
||||||
None,
|
|
||||||
root_allow,
|
|
||||||
"allow for view-table",
|
|
||||||
)
|
|
||||||
|
|
||||||
if self.action == "view-query":
|
|
||||||
self.add_allow_block_rule(
|
|
||||||
None,
|
|
||||||
None,
|
|
||||||
root_allow,
|
|
||||||
"allow for view-query",
|
|
||||||
)
|
|
||||||
|
|
||||||
if self.action == "execute-sql":
|
|
||||||
self.add_allow_block_rule(
|
|
||||||
None,
|
|
||||||
None,
|
|
||||||
self.config.get("allow_sql"),
|
|
||||||
"allow_sql",
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@hookimpl(specname="permission_resources_sql")
|
|
||||||
async def config_permissions_sql(
|
|
||||||
datasette: "Datasette",
|
|
||||||
actor: Optional[dict],
|
|
||||||
action: str,
|
|
||||||
) -> Optional[List[PermissionSQL]]:
|
|
||||||
"""
|
|
||||||
Apply permission rules from datasette.yaml configuration.
|
|
||||||
|
|
||||||
This processes:
|
|
||||||
- permissions: blocks at root, database, table, and query levels
|
|
||||||
- allow: blocks for view-* actions
|
|
||||||
- allow_sql: blocks for execute-sql action
|
|
||||||
"""
|
|
||||||
processor = ConfigPermissionProcessor(datasette, actor, action)
|
|
||||||
result = processor.process()
|
|
||||||
|
|
||||||
if result is None:
|
|
||||||
return []
|
|
||||||
|
|
||||||
return [result]
|
|
||||||
|
|
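For reference, the kind of datasette.yaml layout this processor walks looks
roughly like this. This is a hedged sketch, not taken from the diff: the
database, table and query names are invented, and only keys the code above
reads are shown.

    # datasette.yaml (illustrative)
    allow:
      id: admin            # root-level allow block (view-instance etc.)
    permissions:
      view-instance:
        id: "*"            # permissions block: action name -> allow block
    databases:
      mydb:
        allow_sql:
          id: admin        # execute-sql on this database
        tables:
          users:
            allow:
              id: admin    # view-table on mydb/users
        queries:
          recent:
            sql: select * from users order by id desc
            permissions:
              view-query:
                id: "*"
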
@@ -1,70 +0,0 @@
"""
Default permission settings for Datasette.

Provides default allow rules for standard view/execute actions.
"""

from __future__ import annotations

from typing import TYPE_CHECKING, Optional

if TYPE_CHECKING:
    from datasette.app import Datasette

from datasette import hookimpl
from datasette.permissions import PermissionSQL


# Actions that are allowed by default (unless --default-deny is used)
DEFAULT_ALLOW_ACTIONS = frozenset(
    {
        "view-instance",
        "view-database",
        "view-database-download",
        "view-table",
        "view-query",
        "execute-sql",
    }
)


@hookimpl(specname="permission_resources_sql")
async def default_allow_sql_check(
    datasette: "Datasette",
    actor: Optional[dict],
    action: str,
) -> Optional[PermissionSQL]:
    """
    Enforce the default_allow_sql setting.

    When default_allow_sql is false (the default), execute-sql is denied
    unless explicitly allowed by config or other rules.
    """
    if action == "execute-sql":
        if not datasette.setting("default_allow_sql"):
            return PermissionSQL.deny(reason="default_allow_sql is false")

    return None


@hookimpl(specname="permission_resources_sql")
async def default_action_permissions_sql(
    datasette: "Datasette",
    actor: Optional[dict],
    action: str,
) -> Optional[PermissionSQL]:
    """
    Provide default allow rules for standard view/execute actions.

    These defaults are skipped when datasette is started with --default-deny.
    The restriction_sql mechanism (from actor_restrictions_sql) will still
    filter these results if the actor has restrictions.
    """
    if datasette.default_deny:
        return None

    if action in DEFAULT_ALLOW_ACTIONS:
        reason = f"default allow for {action}".replace("'", "''")
        return PermissionSQL.allow(reason=reason)

    return None

@@ -1,85 +0,0 @@
"""
Shared helper utilities for default permission implementations.
"""

from __future__ import annotations

from dataclasses import dataclass
from typing import TYPE_CHECKING, List, Optional, Set

if TYPE_CHECKING:
    from datasette.app import Datasette

from datasette.permissions import PermissionSQL


def get_action_name_variants(datasette: "Datasette", action: str) -> Set[str]:
    """
    Get all name variants for an action (full name and abbreviation).

    Example:
        get_action_name_variants(ds, "view-table") -> {"view-table", "vt"}
    """
    variants = {action}
    action_obj = datasette.actions.get(action)
    if action_obj and action_obj.abbr:
        variants.add(action_obj.abbr)
    return variants


def action_in_list(datasette: "Datasette", action: str, action_list: list) -> bool:
    """Check if an action (or its abbreviation) is in a list."""
    return bool(get_action_name_variants(datasette, action).intersection(action_list))


@dataclass
class PermissionRow:
    """A single permission rule row."""

    parent: Optional[str]
    child: Optional[str]
    allow: bool
    reason: str


class PermissionRowCollector:
    """Collects permission rows and converts them to PermissionSQL."""

    def __init__(self, prefix: str = "row"):
        self.rows: List[PermissionRow] = []
        self.prefix = prefix

    def add(
        self,
        parent: Optional[str],
        child: Optional[str],
        allow: Optional[bool],
        reason: str,
        if_not_none: bool = False,
    ) -> None:
        """Add a permission row. If if_not_none=True, only add if allow is not None."""
        if if_not_none and allow is None:
            return
        self.rows.append(PermissionRow(parent, child, allow, reason))

    def to_permission_sql(self) -> Optional[PermissionSQL]:
        """Convert collected rows to a PermissionSQL object."""
        if not self.rows:
            return None

        parts = []
        params = {}

        for idx, row in enumerate(self.rows):
            key = f"{self.prefix}_{idx}"
            parts.append(
                f"SELECT :{key}_parent AS parent, :{key}_child AS child, "
                f":{key}_allow AS allow, :{key}_reason AS reason"
            )
            params[f"{key}_parent"] = row.parent
            params[f"{key}_child"] = row.child
            params[f"{key}_allow"] = 1 if row.allow else 0
            params[f"{key}_reason"] = row.reason

        sql = "\nUNION ALL\n".join(parts)
        return PermissionSQL(sql=sql, params=params)

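As a quick illustration of the collector defined above (the database and table
names here are invented for the example, not taken from the diff):

    collector = PermissionRowCollector(prefix="cfg")
    collector.add("mydb", None, True, "config allow view-database")
    collector.add("mydb", "users", False, "config deny view-table")
    psql = collector.to_permission_sql()
    # psql.sql is two SELECT rows joined by UNION ALL:
    #   SELECT :cfg_0_parent AS parent, :cfg_0_child AS child,
    #          :cfg_0_allow AS allow, :cfg_0_reason AS reason
    #   UNION ALL
    #   SELECT :cfg_1_parent AS parent, ...
    # psql.params carries cfg_0_parent="mydb", cfg_0_child=None,
    # cfg_0_allow=1, cfg_0_reason="config allow view-database", and so on.
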
@@ -1,195 +0,0 @@
"""
Actor restriction handling for Datasette permissions.

This module handles the _r (restrictions) key in actor dictionaries, which
contains allowlists of resources the actor can access.
"""

from __future__ import annotations

from dataclasses import dataclass
from typing import TYPE_CHECKING, List, Optional, Set, Tuple

if TYPE_CHECKING:
    from datasette.app import Datasette

from datasette import hookimpl
from datasette.permissions import PermissionSQL

from .helpers import action_in_list, get_action_name_variants


@dataclass
class ActorRestrictions:
    """Parsed actor restrictions from the _r key."""

    global_actions: List[str]  # _r.a - globally allowed actions
    database_actions: dict  # _r.d - {db_name: [actions]}
    table_actions: dict  # _r.r - {db_name: {table: [actions]}}

    @classmethod
    def from_actor(cls, actor: Optional[dict]) -> Optional["ActorRestrictions"]:
        """Parse restrictions from actor dict. Returns None if no restrictions."""
        if not actor:
            return None
        assert isinstance(actor, dict), "actor must be a dictionary"

        restrictions = actor.get("_r")
        if restrictions is None:
            return None

        return cls(
            global_actions=restrictions.get("a", []),
            database_actions=restrictions.get("d", {}),
            table_actions=restrictions.get("r", {}),
        )

    def is_action_globally_allowed(self, datasette: "Datasette", action: str) -> bool:
        """Check if action is in the global allowlist."""
        return action_in_list(datasette, action, self.global_actions)

    def get_allowed_databases(self, datasette: "Datasette", action: str) -> Set[str]:
        """Get database names where this action is allowed."""
        allowed = set()
        for db_name, db_actions in self.database_actions.items():
            if action_in_list(datasette, action, db_actions):
                allowed.add(db_name)
        return allowed

    def get_allowed_tables(
        self, datasette: "Datasette", action: str
    ) -> Set[Tuple[str, str]]:
        """Get (database, table) pairs where this action is allowed."""
        allowed = set()
        for db_name, tables in self.table_actions.items():
            for table_name, table_actions in tables.items():
                if action_in_list(datasette, action, table_actions):
                    allowed.add((db_name, table_name))
        return allowed


@hookimpl(specname="permission_resources_sql")
async def actor_restrictions_sql(
    datasette: "Datasette",
    actor: Optional[dict],
    action: str,
) -> Optional[List[PermissionSQL]]:
    """
    Handle actor restriction-based permission rules.

    When an actor has an "_r" key, it contains an allowlist of resources they
    can access. This function returns restriction_sql that filters the final
    results to only include resources in that allowlist.

    The _r structure:
    {
        "a": ["vi", "pd"],  # Global actions allowed
        "d": {"mydb": ["vt", "es"]},  # Database-level actions
        "r": {"mydb": {"users": ["vt"]}}  # Table-level actions
    }
    """
    if not actor:
        return None

    restrictions = ActorRestrictions.from_actor(actor)

    if restrictions is None:
        # No restrictions - all resources allowed
        return []

    # If globally allowed, no filtering needed
    if restrictions.is_action_globally_allowed(datasette, action):
        return []

    # Build restriction SQL
    allowed_dbs = restrictions.get_allowed_databases(datasette, action)
    allowed_tables = restrictions.get_allowed_tables(datasette, action)

    # If nothing is allowed for this action, return empty-set restriction
    if not allowed_dbs and not allowed_tables:
        return [
            PermissionSQL(
                params={"deny": f"actor restrictions: {action} not in allowlist"},
                restriction_sql="SELECT NULL AS parent, NULL AS child WHERE 0",
            )
        ]

    # Build UNION of allowed resources
    selects = []
    params = {}
    counter = 0

    # Database-level entries (parent, NULL) - allows all children
    for db_name in allowed_dbs:
        key = f"restr_{counter}"
        counter += 1
        selects.append(f"SELECT :{key}_parent AS parent, NULL AS child")
        params[f"{key}_parent"] = db_name

    # Table-level entries (parent, child)
    for db_name, table_name in allowed_tables:
        key = f"restr_{counter}"
        counter += 1
        selects.append(f"SELECT :{key}_parent AS parent, :{key}_child AS child")
        params[f"{key}_parent"] = db_name
        params[f"{key}_child"] = table_name

    restriction_sql = "\nUNION ALL\n".join(selects)

    return [PermissionSQL(params=params, restriction_sql=restriction_sql)]


def restrictions_allow_action(
    datasette: "Datasette",
    restrictions: dict,
    action: str,
    resource: Optional[str | Tuple[str, str]],
) -> bool:
    """
    Check if restrictions allow the requested action on the requested resource.

    This is a synchronous utility function for use by other code that needs
    to quickly check restriction allowlists.

    Args:
        datasette: The Datasette instance
        restrictions: The _r dict from an actor
        action: The action name to check
        resource: None for global, str for database, (db, table) tuple for table

    Returns:
        True if allowed, False if denied
    """
    # Does this action have an abbreviation?
    to_check = get_action_name_variants(datasette, action)

    # Check global level (any resource)
    all_allowed = restrictions.get("a")
    if all_allowed is not None:
        assert isinstance(all_allowed, list)
        if to_check.intersection(all_allowed):
            return True

    # Check database level
    if resource:
        if isinstance(resource, str):
            database_name = resource
        else:
            database_name = resource[0]
        database_allowed = restrictions.get("d", {}).get(database_name)
        if database_allowed is not None:
            assert isinstance(database_allowed, list)
            if to_check.intersection(database_allowed):
                return True

    # Check table/resource level
    if resource is not None and not isinstance(resource, str) and len(resource) == 2:
        database, table = resource
        table_allowed = restrictions.get("r", {}).get(database, {}).get(table)
        if table_allowed is not None:
            assert isinstance(table_allowed, list)
            if to_check.intersection(table_allowed):
                return True

    # This action is not explicitly allowed, so reject it
    return False

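For example, given the _r structure from the docstring above, the synchronous
helper behaves like this. This is an illustrative sketch; it assumes the named
actions are registered with the abbreviations shown ("vi", "es", "vt").

    _r = {
        "a": ["vi", "pd"],
        "d": {"mydb": ["vt", "es"]},
        "r": {"mydb": {"users": ["vt"]}},
    }
    # Globally allowed action ("vi" is in _r["a"]):
    restrictions_allow_action(datasette, _r, "view-instance", None)            # True
    # Allowed anywhere in the "mydb" database ("es" is in _r["d"]["mydb"]):
    restrictions_allow_action(datasette, _r, "execute-sql", "mydb")            # True
    # Allowed on one specific table:
    restrictions_allow_action(datasette, _r, "view-table", ("mydb", "users"))  # True
    # Anything not explicitly listed is rejected:
    restrictions_allow_action(datasette, _r, "view-table", ("mydb", "orders")) # False
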
@@ -1,29 +0,0 @@
"""
Root user permission handling for Datasette.

Grants full permissions to the root user when --root flag is used.
"""

from __future__ import annotations

from typing import TYPE_CHECKING, Optional

if TYPE_CHECKING:
    from datasette.app import Datasette

from datasette import hookimpl
from datasette.permissions import PermissionSQL


@hookimpl(specname="permission_resources_sql")
async def root_user_permissions_sql(
    datasette: "Datasette",
    actor: Optional[dict],
) -> Optional[PermissionSQL]:
    """
    Grant root user full permissions when --root flag is used.
    """
    if not datasette.root_enabled:
        return None
    if actor is not None and actor.get("id") == "root":
        return PermissionSQL.allow(reason="root user")

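In practice this pairs with the --root CLI flag (the database filename below
is just an example):

    datasette mydata.db --root
    # prints a one-time signed login URL; following it signs you in as
    # the actor {"id": "root"}, which the hook above then matches
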
@@ -1,95 +0,0 @@
"""
Token authentication for Datasette.

Handles signed API tokens (dstok_ prefix).
"""

from __future__ import annotations

import time
from typing import TYPE_CHECKING, Optional

if TYPE_CHECKING:
    from datasette.app import Datasette

import itsdangerous

from datasette import hookimpl


@hookimpl(specname="actor_from_request")
def actor_from_signed_api_token(datasette: "Datasette", request) -> Optional[dict]:
    """
    Authenticate requests using signed API tokens (dstok_ prefix).

    Token structure (signed JSON):
    {
        "a": "actor_id",  # Actor ID
        "t": 1234567890,  # Timestamp (Unix epoch)
        "d": 3600,  # Optional: Duration in seconds
        "_r": {...}  # Optional: Restrictions
    }
    """
    prefix = "dstok_"

    # Check if tokens are enabled
    if not datasette.setting("allow_signed_tokens"):
        return None

    max_signed_tokens_ttl = datasette.setting("max_signed_tokens_ttl")

    # Get authorization header
    authorization = request.headers.get("authorization")
    if not authorization:
        return None
    if not authorization.startswith("Bearer "):
        return None

    token = authorization[len("Bearer ") :]
    if not token.startswith(prefix):
        return None

    # Remove prefix and verify signature
    token = token[len(prefix) :]
    try:
        decoded = datasette.unsign(token, namespace="token")
    except itsdangerous.BadSignature:
        return None

    # Validate timestamp
    if "t" not in decoded:
        return None
    created = decoded["t"]
    if not isinstance(created, int):
        return None

    # Handle duration/expiry
    duration = decoded.get("d")
    if duration is not None and not isinstance(duration, int):
        return None

    # Apply max TTL if configured
    if (duration is None and max_signed_tokens_ttl) or (
        duration is not None
        and max_signed_tokens_ttl
        and duration > max_signed_tokens_ttl
    ):
        duration = max_signed_tokens_ttl

    # Check expiry
    if duration:
        if time.time() - created > duration:
            return None

    # Build actor dict
    actor = {"id": decoded["a"], "token": "dstok"}

    # Copy restrictions if present
    if "_r" in decoded:
        actor["_r"] = decoded["_r"]

    # Add expiry timestamp if applicable
    if duration:
        actor["token_expires"] = created + duration

    return actor

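To make the round trip concrete, here is a hedged sketch of producing a token
this hook would accept, assuming datasette.sign() is the signing counterpart
of the datasette.unsign() call used above (the field values are invented):

    payload = {"a": "root", "t": int(time.time()), "d": 3600}
    token = "dstok_" + datasette.sign(payload, namespace="token")
    # Send it as:  Authorization: Bearer dstok_<signed payload>
    # The hook unsigns it, checks "t" and the optional "d" duration against
    # max_signed_tokens_ttl, then returns {"id": "root", "token": "dstok", ...}
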
@ -1,235 +0,0 @@
|
||||||
from abc import ABC, abstractproperty
|
|
||||||
from dataclasses import asdict, dataclass, field
|
|
||||||
from datasette.hookspecs import hookimpl
|
|
||||||
from datetime import datetime, timezone
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class Event(ABC):
|
|
||||||
@abstractproperty
|
|
||||||
def name(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
created: datetime = field(
|
|
||||||
init=False, default_factory=lambda: datetime.now(timezone.utc)
|
|
||||||
)
|
|
||||||
actor: dict | None
|
|
||||||
|
|
||||||
def properties(self):
|
|
||||||
properties = asdict(self)
|
|
||||||
properties.pop("actor", None)
|
|
||||||
properties.pop("created", None)
|
|
||||||
return properties
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class LoginEvent(Event):
|
|
||||||
"""
|
|
||||||
Event name: ``login``
|
|
||||||
|
|
||||||
A user (represented by ``event.actor``) has logged in.
|
|
||||||
"""
|
|
||||||
|
|
||||||
name = "login"
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class LogoutEvent(Event):
|
|
||||||
"""
|
|
||||||
Event name: ``logout``
|
|
||||||
|
|
||||||
A user (represented by ``event.actor``) has logged out.
|
|
||||||
"""
|
|
||||||
|
|
||||||
name = "logout"
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class CreateTokenEvent(Event):
|
|
||||||
"""
|
|
||||||
Event name: ``create-token``
|
|
||||||
|
|
||||||
A user created an API token.
|
|
||||||
|
|
||||||
:ivar expires_after: Number of seconds after which this token will expire.
|
|
||||||
:type expires_after: int or None
|
|
||||||
:ivar restrict_all: Restricted permissions for this token.
|
|
||||||
:type restrict_all: list
|
|
||||||
:ivar restrict_database: Restricted database permissions for this token.
|
|
||||||
:type restrict_database: dict
|
|
||||||
:ivar restrict_resource: Restricted resource permissions for this token.
|
|
||||||
:type restrict_resource: dict
|
|
||||||
"""
|
|
||||||
|
|
||||||
name = "create-token"
|
|
||||||
expires_after: int | None
|
|
||||||
restrict_all: list
|
|
||||||
restrict_database: dict
|
|
||||||
restrict_resource: dict
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class CreateTableEvent(Event):
|
|
||||||
"""
|
|
||||||
Event name: ``create-table``
|
|
||||||
|
|
||||||
A new table has been created in the database.
|
|
||||||
|
|
||||||
:ivar database: The name of the database where the table was created.
|
|
||||||
:type database: str
|
|
||||||
:ivar table: The name of the table that was created
|
|
||||||
:type table: str
|
|
||||||
:ivar schema: The SQL schema definition for the new table.
|
|
||||||
:type schema: str
|
|
||||||
"""
|
|
||||||
|
|
||||||
name = "create-table"
|
|
||||||
database: str
|
|
||||||
table: str
|
|
||||||
schema: str
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class DropTableEvent(Event):
|
|
||||||
"""
|
|
||||||
Event name: ``drop-table``
|
|
||||||
|
|
||||||
A table has been dropped from the database.
|
|
||||||
|
|
||||||
:ivar database: The name of the database where the table was dropped.
|
|
||||||
:type database: str
|
|
||||||
:ivar table: The name of the table that was dropped
|
|
||||||
:type table: str
|
|
||||||
"""
|
|
||||||
|
|
||||||
name = "drop-table"
|
|
||||||
database: str
|
|
||||||
table: str
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class AlterTableEvent(Event):
|
|
||||||
"""
|
|
||||||
Event name: ``alter-table``
|
|
||||||
|
|
||||||
A table has been altered.
|
|
||||||
|
|
||||||
:ivar database: The name of the database where the table was altered
|
|
||||||
:type database: str
|
|
||||||
:ivar table: The name of the table that was altered
|
|
||||||
:type table: str
|
|
||||||
:ivar before_schema: The table's SQL schema before the alteration
|
|
||||||
:type before_schema: str
|
|
||||||
:ivar after_schema: The table's SQL schema after the alteration
|
|
||||||
:type after_schema: str
|
|
||||||
"""
|
|
||||||
|
|
||||||
name = "alter-table"
|
|
||||||
database: str
|
|
||||||
table: str
|
|
||||||
before_schema: str
|
|
||||||
after_schema: str
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class InsertRowsEvent(Event):
|
|
||||||
"""
|
|
||||||
Event name: ``insert-rows``
|
|
||||||
|
|
||||||
Rows were inserted into a table.
|
|
||||||
|
|
||||||
:ivar database: The name of the database where the rows were inserted.
|
|
||||||
:type database: str
|
|
||||||
:ivar table: The name of the table where the rows were inserted.
|
|
||||||
:type table: str
|
|
||||||
:ivar num_rows: The number of rows that were requested to be inserted.
|
|
||||||
:type num_rows: int
|
|
||||||
:ivar ignore: Was ignore set?
|
|
||||||
:type ignore: bool
|
|
||||||
:ivar replace: Was replace set?
|
|
||||||
:type replace: bool
|
|
||||||
"""
|
|
||||||
|
|
||||||
name = "insert-rows"
|
|
||||||
database: str
|
|
||||||
table: str
|
|
||||||
num_rows: int
|
|
||||||
ignore: bool
|
|
||||||
replace: bool
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class UpsertRowsEvent(Event):
|
|
||||||
"""
|
|
||||||
Event name: ``upsert-rows``
|
|
||||||
|
|
||||||
Rows were upserted into a table.
|
|
||||||
|
|
||||||
:ivar database: The name of the database where the rows were inserted.
|
|
||||||
:type database: str
|
|
||||||
:ivar table: The name of the table where the rows were inserted.
|
|
||||||
:type table: str
|
|
||||||
:ivar num_rows: The number of rows that were requested to be inserted.
|
|
||||||
:type num_rows: int
|
|
||||||
"""
|
|
||||||
|
|
||||||
name = "upsert-rows"
|
|
||||||
database: str
|
|
||||||
table: str
|
|
||||||
num_rows: int
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class UpdateRowEvent(Event):
|
|
||||||
"""
|
|
||||||
Event name: ``update-row``
|
|
||||||
|
|
||||||
A row was updated in a table.
|
|
||||||
|
|
||||||
:ivar database: The name of the database where the row was updated.
|
|
||||||
:type database: str
|
|
||||||
:ivar table: The name of the table where the row was updated.
|
|
||||||
:type table: str
|
|
||||||
:ivar pks: The primary key values of the updated row.
|
|
||||||
"""
|
|
||||||
|
|
||||||
name = "update-row"
|
|
||||||
database: str
|
|
||||||
table: str
|
|
||||||
pks: list
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class DeleteRowEvent(Event):
|
|
||||||
"""
|
|
||||||
Event name: ``delete-row``
|
|
||||||
|
|
||||||
A row was deleted from a table.
|
|
||||||
|
|
||||||
:ivar database: The name of the database where the row was deleted.
|
|
||||||
:type database: str
|
|
||||||
:ivar table: The name of the table where the row was deleted.
|
|
||||||
:type table: str
|
|
||||||
:ivar pks: The primary key values of the deleted row.
|
|
||||||
"""
|
|
||||||
|
|
||||||
name = "delete-row"
|
|
||||||
database: str
|
|
||||||
table: str
|
|
||||||
pks: list
|
|
||||||
|
|
||||||
|
|
||||||
@hookimpl
|
|
||||||
def register_events():
|
|
||||||
return [
|
|
||||||
LoginEvent,
|
|
||||||
LogoutEvent,
|
|
||||||
CreateTableEvent,
|
|
||||||
CreateTokenEvent,
|
|
||||||
AlterTableEvent,
|
|
||||||
DropTableEvent,
|
|
||||||
InsertRowsEvent,
|
|
||||||
UpsertRowsEvent,
|
|
||||||
UpdateRowEvent,
|
|
||||||
DeleteRowEvent,
|
|
||||||
]
|
|
||||||
|
|
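These event classes are registered via the register_events() hook above; a
plugin would then fire one through the event tracking machinery. A minimal
sketch, assuming this module is importable as datasette.events and that
datasette.track_event() is the dispatch entry point:

    from datasette.events import LoginEvent

    async def after_login(datasette, actor):
        # "created" is filled in automatically by the dataclass field default
        await datasette.track_event(LoginEvent(actor=actor))
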
@@ -1,18 +1,20 @@
 import json
 import urllib
+import re
 from datasette import hookimpl
-from datasette.database import QueryInterrupted
 from datasette.utils import (
     escape_sqlite,
     path_with_added_args,
     path_with_removed_args,
     detect_json1,
+    QueryInterrupted,
+    InvalidSql,
     sqlite3,
 )


-def load_facet_configs(request, table_config):
-    # Given a request and the configuration for a table, return
+def load_facet_configs(request, table_metadata):
+    # Given a request and the metadata configuration for a table, return
     # a dictionary of selected facets, their lists of configs and for each
     # config whether it came from the request or the metadata.
     #
@@ -20,21 +22,21 @@ def load_facet_configs(request, table_config):
     # {"source": "metadata", "config": config1},
     # {"source": "request", "config": config2}]}
     facet_configs = {}
-    table_config = table_config or {}
-    table_facet_configs = table_config.get("facets", [])
-    for facet_config in table_facet_configs:
-        if isinstance(facet_config, str):
+    table_metadata = table_metadata or {}
+    metadata_facets = table_metadata.get("facets", [])
+    for metadata_config in metadata_facets:
+        if isinstance(metadata_config, str):
             type = "column"
-            facet_config = {"simple": facet_config}
+            metadata_config = {"simple": metadata_config}
         else:
             assert (
-                len(facet_config.values()) == 1
+                len(metadata_config.values()) == 1
             ), "Metadata config dicts should be {type: config}"
-            type, facet_config = list(facet_config.items())[0]
-            if isinstance(facet_config, str):
-                facet_config = {"simple": facet_config}
+            type, metadata_config = metadata_config.items()[0]
+            if isinstance(metadata_config, str):
+                metadata_config = {"simple": metadata_config}
         facet_configs.setdefault(type, []).append(
-            {"source": "metadata", "config": facet_config}
+            {"source": "metadata", "config": metadata_config}
         )
     qs_pairs = urllib.parse.parse_qs(request.query_string, keep_blank_values=True)
     for key, values in qs_pairs.items():
@@ -45,19 +47,20 @@ def load_facet_configs(request, table_config):
         elif key.startswith("_facet_"):
             type = key[len("_facet_") :]
             for value in values:
-                # The value is the facet_config - either JSON or not
-                facet_config = (
-                    json.loads(value) if value.startswith("{") else {"simple": value}
-                )
+                # The value is the config - either JSON or not
+                if value.startswith("{"):
+                    config = json.loads(value)
+                else:
+                    config = {"simple": value}
                 facet_configs.setdefault(type, []).append(
-                    {"source": "request", "config": facet_config}
+                    {"source": "request", "config": config}
                 )
     return facet_configs


 @hookimpl
 def register_facet_classes():
-    classes = [ColumnFacet, DateFacet]
+    classes = [ColumnFacet, DateFacet, ManyToManyFacet]
     if detect_json1():
         classes.append(ArrayFacet)
     return classes
@@ -65,8 +68,6 @@ def register_facet_classes():

 class Facet:
     type = None
-    # How many rows to consider when suggesting facets:
-    suggest_consider = 1000

     def __init__(
         self,
@@ -76,7 +77,7 @@ class Facet:
         sql=None,
         table=None,
         params=None,
-        table_config=None,
+        metadata=None,
         row_count=None,
     ):
         assert table or sql, "Must provide either table= or sql="
@@ -85,14 +86,14 @@ class Facet:
         self.database = database
         # For foreign key expansion. Can be None for e.g. canned SQL queries:
         self.table = table
-        self.sql = sql or f"select * from [{table}]"
+        self.sql = sql or "select * from [{}]".format(table)
         self.params = params or []
-        self.table_config = table_config
+        self.metadata = metadata
         # row_count can be None, in which case we calculate it ourselves:
         self.row_count = row_count

     def get_configs(self):
-        configs = load_facet_configs(self.request, self.table_config)
+        configs = load_facet_configs(self.request, self.metadata)
         return configs.get(self.type) or []

     def get_querystring_pairs(self):
@@ -100,36 +101,6 @@ class Facet:
         # [('_foo', 'bar'), ('_foo', '2'), ('empty', '')]
         return urllib.parse.parse_qsl(self.request.query_string, keep_blank_values=True)

-    def get_facet_size(self):
-        facet_size = self.ds.setting("default_facet_size")
-        max_returned_rows = self.ds.setting("max_returned_rows")
-        table_facet_size = None
-        if self.table:
-            config_facet_size = (
-                self.ds.config.get("databases", {})
-                .get(self.database, {})
-                .get("tables", {})
-                .get(self.table, {})
-                .get("facet_size")
-            )
-            if config_facet_size:
-                table_facet_size = config_facet_size
-        custom_facet_size = self.request.args.get("_facet_size")
-        if custom_facet_size:
-            if custom_facet_size == "max":
-                facet_size = max_returned_rows
-            elif custom_facet_size.isdigit():
-                facet_size = int(custom_facet_size)
-            else:
-                # Invalid value, ignore it
-                custom_facet_size = None
-        if table_facet_size and not custom_facet_size:
-            if table_facet_size == "max":
-                facet_size = max_returned_rows
-            else:
-                facet_size = table_facet_size
-        return min(facet_size, max_returned_rows)
-
     async def suggest(self):
         return []

@@ -143,10 +114,21 @@ class Facet:
         # Detect column names using the "limit 0" trick
         return (
             await self.ds.execute(
-                self.database, f"select * from ({sql}) limit 0", params or []
+                self.database, "select * from ({}) limit 0".format(sql), params or []
             )
         ).columns

+    async def get_row_count(self):
+        if self.row_count is None:
+            self.row_count = (
+                await self.ds.execute(
+                    self.database,
+                    "select count(*) from ({})".format(self.sql),
+                    self.params,
+                )
+            ).rows[0][0]
+        return self.row_count


 class ColumnFacet(Facet):
     type = "column"
@@ -154,23 +136,19 @@ class ColumnFacet(Facet):
     async def suggest(self):
         row_count = await self.get_row_count()
         columns = await self.get_columns(self.sql, self.params)
-        facet_size = self.get_facet_size()
+        facet_size = self.ds.config("default_facet_size")
         suggested_facets = []
        already_enabled = [c["config"]["simple"] for c in self.get_configs()]
         for column in columns:
             if column in already_enabled:
                 continue
             suggested_facet_sql = """
-                with limited as (select * from ({sql}) limit {suggest_consider})
-                select {column} as value, count(*) as n from limited
-                where value is not null
-                group by value
+                select distinct {column} from (
+                    {sql}
+                ) where {column} is not null
                 limit {limit}
             """.format(
-                column=escape_sqlite(column),
-                sql=self.sql,
-                limit=facet_size + 1,
-                suggest_consider=self.suggest_consider,
+                column=escape_sqlite(column), sql=self.sql, limit=facet_size + 1
             )
             distinct_values = None
             try:
@@ -179,25 +157,21 @@ class ColumnFacet(Facet):
                     suggested_facet_sql,
                     self.params,
                     truncate=False,
-                    custom_time_limit=self.ds.setting("facet_suggest_time_limit_ms"),
+                    custom_time_limit=self.ds.config("facet_suggest_time_limit_ms"),
                 )
                 num_distinct_values = len(distinct_values)
                 if (
-                    1 < num_distinct_values < row_count
+                    num_distinct_values
+                    and num_distinct_values > 1
                     and num_distinct_values <= facet_size
-                    # And at least one has n > 1
-                    and any(r["n"] > 1 for r in distinct_values)
+                    and num_distinct_values < row_count
                 ):
                     suggested_facets.append(
                         {
                             "name": column,
                             "toggle_url": self.ds.absolute_url(
                                 self.request,
-                                self.ds.urls.path(
-                                    path_with_added_args(
-                                        self.request, {"_facet": column}
-                                    )
-                                ),
+                                path_with_added_args(self.request, {"_facet": column}),
                             ),
                         }
                     )
@@ -205,24 +179,13 @@ class ColumnFacet(Facet):
                 continue
         return suggested_facets

-    async def get_row_count(self):
-        if self.row_count is None:
-            self.row_count = (
-                await self.ds.execute(
-                    self.database,
-                    f"select count(*) from (select * from ({self.sql}) limit {self.suggest_consider})",
-                    self.params,
-                )
-            ).rows[0][0]
-        return self.row_count
-
     async def facet_results(self):
-        facet_results = []
+        facet_results = {}
         facets_timed_out = []

         qs_pairs = self.get_querystring_pairs()

-        facet_size = self.get_facet_size()
+        facet_size = self.ds.config("default_facet_size")
         for source_and_config in self.get_configs():
             config = source_and_config["config"]
             source = source_and_config["source"]
@@ -232,7 +195,7 @@ class ColumnFacet(Facet):
                     {sql}
                 )
                 where {col} is not null
-                group by {col} order by count desc, value limit {limit}
+                group by {col} order by count desc limit {limit}
             """.format(
                 col=escape_sqlite(column), sql=self.sql, limit=facet_size + 1
             )
@@ -242,42 +205,37 @@ class ColumnFacet(Facet):
                     facet_sql,
                     self.params,
                     truncate=False,
-                    custom_time_limit=self.ds.setting("facet_time_limit_ms"),
+                    custom_time_limit=self.ds.config("facet_time_limit_ms"),
                 )
                 facet_results_values = []
-                facet_results.append(
-                    {
-                        "name": column,
-                        "type": self.type,
-                        "hideable": source != "metadata",
-                        "toggle_url": self.ds.urls.path(
-                            path_with_removed_args(self.request, {"_facet": column})
-                        ),
-                        "results": facet_results_values,
-                        "truncated": len(facet_rows_results) > facet_size,
-                    }
-                )
+                facet_results[column] = {
+                    "name": column,
+                    "type": self.type,
+                    "hideable": source != "metadata",
+                    "toggle_url": path_with_removed_args(
+                        self.request, {"_facet": column}
+                    ),
+                    "results": facet_results_values,
+                    "truncated": len(facet_rows_results) > facet_size,
+                }
                 facet_rows = facet_rows_results.rows[:facet_size]
                 if self.table:
                     # Attempt to expand foreign keys into labels
                     values = [row["value"] for row in facet_rows]
                     expanded = await self.ds.expand_foreign_keys(
-                        self.request.actor, self.database, self.table, column, values
+                        self.database, self.table, column, values
                     )
                 else:
                     expanded = {}
                 for row in facet_rows:
-                    column_qs = column
-                    if column.startswith("_"):
-                        column_qs = "{}__exact".format(column)
-                    selected = (column_qs, str(row["value"])) in qs_pairs
+                    selected = (column, str(row["value"])) in qs_pairs
                     if selected:
                         toggle_path = path_with_removed_args(
-                            self.request, {column_qs: str(row["value"])}
+                            self.request, {column: str(row["value"])}
                         )
                     else:
                         toggle_path = path_with_added_args(
-                            self.request, {column_qs: row["value"]}
+                            self.request, {column: row["value"]}
                         )
                     facet_results_values.append(
                         {
@@ -285,7 +243,7 @@ class ColumnFacet(Facet):
                             "label": expanded.get((column, row["value"]), row["value"]),
                             "count": row["count"],
                             "toggle_url": self.ds.absolute_url(
-                                self.request, self.ds.urls.path(toggle_path)
+                                self.request, toggle_path
                             ),
                             "selected": selected,
                         }
@@ -299,16 +257,6 @@ class ColumnFacet(Facet):
 class ArrayFacet(Facet):
     type = "array"

-    def _is_json_array_of_strings(self, json_string):
-        try:
-            array = json.loads(json_string)
-        except ValueError:
-            return False
-        for item in array:
-            if not isinstance(item, str):
-                return False
-        return True
-
     async def suggest(self):
         columns = await self.get_columns(self.sql, self.params)
         suggested_facets = []
@@ -318,14 +266,10 @@ class ArrayFacet(Facet):
                 continue
             # Is every value in this column either null or a JSON array?
             suggested_facet_sql = """
-                with limited as (select * from ({sql}) limit {suggest_consider})
                 select distinct json_type({column})
-                from limited
-                where {column} is not null and {column} != ''
+                from ({sql})
             """.format(
-                column=escape_sqlite(column),
-                sql=self.sql,
-                suggest_consider=self.suggest_consider,
+                column=escape_sqlite(column), sql=self.sql
             )
             try:
                 results = await self.ds.execute(
@@ -333,86 +277,44 @@ class ArrayFacet(Facet):
                     suggested_facet_sql,
                     self.params,
                     truncate=False,
-                    custom_time_limit=self.ds.setting("facet_suggest_time_limit_ms"),
+                    custom_time_limit=self.ds.config("facet_suggest_time_limit_ms"),
                     log_sql_errors=False,
                 )
                 types = tuple(r[0] for r in results.rows)
                 if types in (("array",), ("array", None)):
-                    # Now check that first 100 arrays contain only strings
-                    first_100 = [
-                        v[0]
-                        for v in await self.ds.execute(
-                            self.database,
-                            (
-                                "select {column} from ({sql}) "
-                                "where {column} is not null "
-                                "and {column} != '' "
-                                "and json_array_length({column}) > 0 "
-                                "limit 100"
-                            ).format(column=escape_sqlite(column), sql=self.sql),
-                            self.params,
-                            truncate=False,
-                            custom_time_limit=self.ds.setting(
-                                "facet_suggest_time_limit_ms"
-                            ),
-                            log_sql_errors=False,
-                        )
-                    ]
-                    if first_100 and all(
-                        self._is_json_array_of_strings(r) for r in first_100
-                    ):
-                        suggested_facets.append(
-                            {
-                                "name": column,
-                                "type": "array",
-                                "toggle_url": self.ds.absolute_url(
-                                    self.request,
-                                    self.ds.urls.path(
-                                        path_with_added_args(
-                                            self.request, {"_facet_array": column}
-                                        )
-                                    ),
-                                ),
-                            }
-                        )
+                    suggested_facets.append(
+                        {
+                            "name": column,
+                            "type": "array",
+                            "toggle_url": self.ds.absolute_url(
+                                self.request,
+                                path_with_added_args(
+                                    self.request, {"_facet_array": column}
+                                ),
+                            ),
+                        }
+                    )
             except (QueryInterrupted, sqlite3.OperationalError):
                 continue
         return suggested_facets

     async def facet_results(self):
         # self.configs should be a plain list of columns
-        facet_results = []
+        facet_results = {}
         facets_timed_out = []

-        facet_size = self.get_facet_size()
+        facet_size = self.ds.config("default_facet_size")
         for source_and_config in self.get_configs():
             config = source_and_config["config"]
             source = source_and_config["source"]
             column = config.get("column") or config["simple"]
-            # https://github.com/simonw/datasette/issues/448
             facet_sql = """
-                with inner as ({sql}),
-                deduped_array_items as (
-                    select
-                        distinct j.value,
-                        inner.*
-                    from
-                        json_each([inner].{col}) j
-                        join inner
-                )
-                select
-                    value as value,
-                    count(*) as count
-                from
-                    deduped_array_items
-                group by
-                    value
-                order by
-                    count(*) desc, value limit {limit}
+                select j.value as value, count(*) as count from (
+                    {sql}
+                ) join json_each({col}) j
+                group by j.value order by count desc limit {limit}
             """.format(
-                col=escape_sqlite(column),
-                sql=self.sql,
-                limit=facet_size + 1,
+                col=escape_sqlite(column), sql=self.sql, limit=facet_size + 1
             )
             try:
                 facet_rows_results = await self.ds.execute(
@@ -420,35 +322,31 @@ class ArrayFacet(Facet):
                     facet_sql,
                     self.params,
                     truncate=False,
-                    custom_time_limit=self.ds.setting("facet_time_limit_ms"),
+                    custom_time_limit=self.ds.config("facet_time_limit_ms"),
                 )
                 facet_results_values = []
-                facet_results.append(
-                    {
-                        "name": column,
-                        "type": self.type,
-                        "results": facet_results_values,
-                        "hideable": source != "metadata",
-                        "toggle_url": self.ds.urls.path(
-                            path_with_removed_args(
-                                self.request, {"_facet_array": column}
-                            )
-                        ),
-                        "truncated": len(facet_rows_results) > facet_size,
-                    }
-                )
+                facet_results[column] = {
+                    "name": column,
+                    "type": self.type,
+                    "results": facet_results_values,
+                    "hideable": source != "metadata",
+                    "toggle_url": path_with_removed_args(
+                        self.request, {"_facet_array": column}
+                    ),
+                    "truncated": len(facet_rows_results) > facet_size,
+                }
                 facet_rows = facet_rows_results.rows[:facet_size]
                 pairs = self.get_querystring_pairs()
                 for row in facet_rows:
                     value = str(row["value"])
-                    selected = (f"{column}__arraycontains", value) in pairs
+                    selected = ("{}__arraycontains".format(column), value) in pairs
                     if selected:
                         toggle_path = path_with_removed_args(
-                            self.request, {f"{column}__arraycontains": value}
+                            self.request, {"{}__arraycontains".format(column): value}
                         )
                     else:
                         toggle_path = path_with_added_args(
-                            self.request, {f"{column}__arraycontains": value}
+                            self.request, {"{}__arraycontains".format(column): value}
                         )
                     facet_results_values.append(
                         {
@@ -480,8 +378,8 @@ class DateFacet(Facet):
             # Does this column contain any dates in the first 100 rows?
             suggested_facet_sql = """
                 select date({column}) from (
-                    select * from ({sql}) limit 100
-                ) where {column} glob "????-??-*"
+                    {sql}
+                ) where {column} glob "????-??-*" limit 100;
             """.format(
                 column=escape_sqlite(column), sql=self.sql
             )
@@ -491,7 +389,7 @@ class DateFacet(Facet):
                     suggested_facet_sql,
                     self.params,
                     truncate=False,
-                    custom_time_limit=self.ds.setting("facet_suggest_time_limit_ms"),
+                    custom_time_limit=self.ds.config("facet_suggest_time_limit_ms"),
                     log_sql_errors=False,
                 )
                 values = tuple(r[0] for r in results.rows)
@@ -502,10 +400,8 @@ class DateFacet(Facet):
                             "type": "date",
                             "toggle_url": self.ds.absolute_url(
                                 self.request,
-                                self.ds.urls.path(
-                                    path_with_added_args(
-                                        self.request, {"_facet_date": column}
-                                    )
+                                path_with_added_args(
+                                    self.request, {"_facet_date": column}
                                 ),
                             ),
                         }
@@ -515,10 +411,10 @@ class DateFacet(Facet):
         return suggested_facets

     async def facet_results(self):
-        facet_results = []
+        facet_results = {}
         facets_timed_out = []
         args = dict(self.get_querystring_pairs())
-        facet_size = self.get_facet_size()
+        facet_size = self.ds.config("default_facet_size")
         for source_and_config in self.get_configs():
             config = source_and_config["config"]
             source = source_and_config["source"]
@@ -529,7 +425,7 @@ class DateFacet(Facet):
                     {sql}
                 )
                 where date({col}) is not null
-                group by date({col}) order by count desc, value limit {limit}
+                group by date({col}) order by count desc limit {limit}
             """.format(
                 col=escape_sqlite(column), sql=self.sql, limit=facet_size + 1
             )
@@ -539,31 +435,31 @@ class DateFacet(Facet):
                     facet_sql,
                     self.params,
                     truncate=False,
-                    custom_time_limit=self.ds.setting("facet_time_limit_ms"),
+                    custom_time_limit=self.ds.config("facet_time_limit_ms"),
                 )
                 facet_results_values = []
-                facet_results.append(
-                    {
-                        "name": column,
-                        "type": self.type,
-                        "results": facet_results_values,
-                        "hideable": source != "metadata",
-                        "toggle_url": path_with_removed_args(
-                            self.request, {"_facet_date": column}
-                        ),
-                        "truncated": len(facet_rows_results) > facet_size,
-                    }
-                )
+                facet_results[column] = {
+                    "name": column,
+                    "type": self.type,
+                    "results": facet_results_values,
+                    "hideable": source != "metadata",
+                    "toggle_url": path_with_removed_args(
+                        self.request, {"_facet_date": column}
+                    ),
+                    "truncated": len(facet_rows_results) > facet_size,
+                }
                 facet_rows = facet_rows_results.rows[:facet_size]
                 for row in facet_rows:
-                    selected = str(args.get(f"{column}__date")) == str(row["value"])
+                    selected = str(args.get("{}__date".format(column))) == str(
+                        row["value"]
+                    )
                     if selected:
                         toggle_path = path_with_removed_args(
-                            self.request, {f"{column}__date": str(row["value"])}
+                            self.request, {"{}__date".format(column): str(row["value"])}
                         )
                     else:
                         toggle_path = path_with_added_args(
-                            self.request, {f"{column}__date": row["value"]}
+                            self.request, {"{}__date".format(column): row["value"]}
                         )
                     facet_results_values.append(
                         {
@@ -580,3 +476,190 @@ class DateFacet(Facet):
                 facets_timed_out.append(column)

         return facet_results, facets_timed_out
+
+
+class ManyToManyFacet(Facet):
+    type = "m2m"
+
+    async def suggest(self):
+        # This is calculated based on foreign key relationships to this table
+        # Are there any many-to-many tables pointing here?
+        suggested_facets = []
+        db = self.ds.databases[self.database]
+        all_foreign_keys = await db.get_all_foreign_keys()
+        if not all_foreign_keys.get(self.table):
+            # It's probably a view
+            return []
+        args = set(self.get_querystring_pairs())
+        incoming = all_foreign_keys[self.table]["incoming"]
+        # Do any of these incoming tables have exactly two outgoing keys?
+        for fk in incoming:
+            other_table = fk["other_table"]
+            other_table_outgoing_foreign_keys = all_foreign_keys[other_table][
+                "outgoing"
+            ]
+            if len(other_table_outgoing_foreign_keys) == 2:
+                destination_table = [
+                    t
+                    for t in other_table_outgoing_foreign_keys
|
||||||
|
if t["other_table"] != self.table
|
||||||
|
][0]["other_table"]
|
||||||
|
# Only suggest if it's not selected already
|
||||||
|
if ("_facet_m2m", destination_table) in args:
|
||||||
|
continue
|
||||||
|
suggested_facets.append(
|
||||||
|
{
|
||||||
|
"name": destination_table,
|
||||||
|
"type": "m2m",
|
||||||
|
"toggle_url": self.ds.absolute_url(
|
||||||
|
self.request,
|
||||||
|
path_with_added_args(
|
||||||
|
self.request, {"_facet_m2m": destination_table}
|
||||||
|
),
|
||||||
|
),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
return suggested_facets
|
||||||
|
|
||||||
|
async def facet_results(self):
|
||||||
|
facet_results = {}
|
||||||
|
facets_timed_out = []
|
||||||
|
args = set(self.get_querystring_pairs())
|
||||||
|
facet_size = self.ds.config("default_facet_size")
|
||||||
|
db = self.ds.databases[self.database]
|
||||||
|
all_foreign_keys = await db.get_all_foreign_keys()
|
||||||
|
if not all_foreign_keys.get(self.table):
|
||||||
|
return [], []
|
||||||
|
# We care about three tables: self.table, middle_table and destination_table
|
||||||
|
incoming = all_foreign_keys[self.table]["incoming"]
|
||||||
|
for source_and_config in self.get_configs():
|
||||||
|
config = source_and_config["config"]
|
||||||
|
source = source_and_config["source"]
|
||||||
|
# The destination_table is specified in the _facet_m2m=xxx parameter
|
||||||
|
destination_table = config.get("column") or config["simple"]
|
||||||
|
# Find middle table - it has fks to self.table AND destination_table
|
||||||
|
fks = None
|
||||||
|
middle_table = None
|
||||||
|
for fk in incoming:
|
||||||
|
other_table = fk["other_table"]
|
||||||
|
other_table_outgoing_foreign_keys = all_foreign_keys[other_table][
|
||||||
|
"outgoing"
|
||||||
|
]
|
||||||
|
if (
|
||||||
|
any(
|
||||||
|
o
|
||||||
|
for o in other_table_outgoing_foreign_keys
|
||||||
|
if o["other_table"] == destination_table
|
||||||
|
)
|
||||||
|
and len(other_table_outgoing_foreign_keys) == 2
|
||||||
|
):
|
||||||
|
fks = other_table_outgoing_foreign_keys
|
||||||
|
middle_table = other_table
|
||||||
|
break
|
||||||
|
if middle_table is None or fks is None:
|
||||||
|
return [], []
|
||||||
|
# Now that we have determined the middle_table, we need to figure out the three
|
||||||
|
# columns on that table which are relevant to us. These are:
|
||||||
|
# column_to_table - the middle_table column with a foreign key to self.table
|
||||||
|
# table_pk - the primary key column on self.table that is referenced
|
||||||
|
# column_to_destination - the column with a foreign key to destination_table
|
||||||
|
#
|
||||||
|
# It turns out we don't actually need the fourth obvious column:
|
||||||
|
# destination_pk = the primary key column on destination_table which is referenced
|
||||||
|
#
|
||||||
|
# These are both in the fks array - which now contains 2 foreign key relationships, e.g:
|
||||||
|
# [
|
||||||
|
# {'other_table': 'characteristic', 'column': 'characteristic_id', 'other_column': 'pk'},
|
||||||
|
# {'other_table': 'attractions', 'column': 'attraction_id', 'other_column': 'pk'}
|
||||||
|
# ]
|
||||||
|
column_to_table = None
|
||||||
|
table_pk = None
|
||||||
|
column_to_destination = None
|
||||||
|
for fk in fks:
|
||||||
|
if fk["other_table"] == self.table:
|
||||||
|
table_pk = fk["other_column"]
|
||||||
|
column_to_table = fk["column"]
|
||||||
|
elif fk["other_table"] == destination_table:
|
||||||
|
column_to_destination = fk["column"]
|
||||||
|
assert all((column_to_table, table_pk, column_to_destination))
|
||||||
|
facet_sql = """
|
||||||
|
select
|
||||||
|
{middle_table}.{column_to_destination} as value,
|
||||||
|
count(distinct {middle_table}.{column_to_table}) as count
|
||||||
|
from {middle_table}
|
||||||
|
where {middle_table}.{column_to_table} in (
|
||||||
|
select {table_pk} from ({sql})
|
||||||
|
)
|
||||||
|
group by {middle_table}.{column_to_destination}
|
||||||
|
order by count desc limit {limit}
|
||||||
|
""".format(
|
||||||
|
sql=self.sql,
|
||||||
|
limit=facet_size + 1,
|
||||||
|
middle_table=escape_sqlite(middle_table),
|
||||||
|
column_to_destination=escape_sqlite(column_to_destination),
|
||||||
|
column_to_table=escape_sqlite(column_to_table),
|
||||||
|
table_pk=escape_sqlite(table_pk),
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
facet_rows_results = await self.ds.execute(
|
||||||
|
self.database,
|
||||||
|
facet_sql,
|
||||||
|
self.params,
|
||||||
|
truncate=False,
|
||||||
|
custom_time_limit=self.ds.config("facet_time_limit_ms"),
|
||||||
|
)
|
||||||
|
facet_results_values = []
|
||||||
|
facet_results[destination_table] = {
|
||||||
|
"name": destination_table,
|
||||||
|
"type": self.type,
|
||||||
|
"results": facet_results_values,
|
||||||
|
"hideable": source != "metadata",
|
||||||
|
"toggle_url": path_with_removed_args(
|
||||||
|
self.request, {"_facet_m2m": destination_table}
|
||||||
|
),
|
||||||
|
"truncated": len(facet_rows_results) > facet_size,
|
||||||
|
}
|
||||||
|
facet_rows = facet_rows_results.rows[:facet_size]
|
||||||
|
|
||||||
|
# Attempt to expand foreign keys into labels
|
||||||
|
values = [row["value"] for row in facet_rows]
|
||||||
|
expanded = await self.ds.expand_foreign_keys(
|
||||||
|
self.database, middle_table, column_to_destination, values
|
||||||
|
)
|
||||||
|
|
||||||
|
for row in facet_rows:
|
||||||
|
through = json.dumps(
|
||||||
|
{
|
||||||
|
"table": middle_table,
|
||||||
|
"column": column_to_destination,
|
||||||
|
"value": str(row["value"]),
|
||||||
|
},
|
||||||
|
separators=(",", ":"),
|
||||||
|
sort_keys=True,
|
||||||
|
)
|
||||||
|
selected = ("_through", through) in args
|
||||||
|
if selected:
|
||||||
|
toggle_path = path_with_removed_args(
|
||||||
|
self.request, {"_through": through}
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
toggle_path = path_with_added_args(
|
||||||
|
self.request, {"_through": through}
|
||||||
|
)
|
||||||
|
facet_results_values.append(
|
||||||
|
{
|
||||||
|
"value": row["value"],
|
||||||
|
"label": expanded.get(
|
||||||
|
(column_to_destination, row["value"]), row["value"]
|
||||||
|
),
|
||||||
|
"count": row["count"],
|
||||||
|
"toggle_url": self.ds.absolute_url(
|
||||||
|
self.request, toggle_path
|
||||||
|
),
|
||||||
|
"selected": selected,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
except QueryInterrupted:
|
||||||
|
facets_timed_out.append(destination_table)
|
||||||
|
|
||||||
|
return facet_results, facets_timed_out
|
||||||
|
|
|
||||||
|
|
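As an aside, the `_through` tokens this facet emits are compact, key-sorted JSON, so the same toggle parameter can be reproduced by hand. A minimal sketch, borrowing the illustrative attraction/characteristic names from the comment in the code above (not a confirmed schema):

    import json
    from urllib.parse import urlencode

    # Hypothetical middle table joining attractions to characteristics
    through = json.dumps(
        {"table": "attraction_characteristic", "column": "characteristic_id", "value": "5"},
        separators=(",", ":"),
        sort_keys=True,
    )
    # Append to the table page URL to filter rows through the m2m relationship
    querystring = urlencode({"_through": through})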
@@ -1,173 +1,7 @@
-from datasette import hookimpl
-from datasette.resources import DatabaseResource
-from datasette.views.base import DatasetteError
-from datasette.utils.asgi import BadRequest
 import json
-from .utils import detect_json1, escape_sqlite, path_with_removed_args
+import numbers

+from .utils import detect_json1, escape_sqlite
-
-@hookimpl(specname="filters_from_request")
-def where_filters(request, database, datasette):
-    # This one deals with ?_where=
-    async def inner():
-        where_clauses = []
-        extra_wheres_for_ui = []
-        if "_where" in request.args:
-            if not await datasette.allowed(
-                action="execute-sql",
-                resource=DatabaseResource(database=database),
-                actor=request.actor,
-            ):
-                raise DatasetteError("_where= is not allowed", status=403)
-            else:
-                where_clauses.extend(request.args.getlist("_where"))
-                extra_wheres_for_ui = [
-                    {
-                        "text": text,
-                        "remove_url": path_with_removed_args(request, {"_where": text}),
-                    }
-                    for text in request.args.getlist("_where")
-                ]
-
-        return FilterArguments(
-            where_clauses,
-            extra_context={
-                "extra_wheres_for_ui": extra_wheres_for_ui,
-            },
-        )
-
-    return inner
-
-
-@hookimpl(specname="filters_from_request")
-def search_filters(request, database, table, datasette):
-    # ?_search= and _search_colname=
-    async def inner():
-        where_clauses = []
-        params = {}
-        human_descriptions = []
-        extra_context = {}
-
-        # Figure out which fts_table to use
-        table_metadata = await datasette.table_config(database, table)
-        db = datasette.get_database(database)
-        fts_table = request.args.get("_fts_table")
-        fts_table = fts_table or table_metadata.get("fts_table")
-        fts_table = fts_table or await db.fts_table(table)
-        fts_pk = request.args.get("_fts_pk", table_metadata.get("fts_pk", "rowid"))
-        search_args = {
-            key: request.args[key]
-            for key in request.args
-            if key.startswith("_search") and key != "_searchmode"
-        }
-        search = ""
-        search_mode_raw = table_metadata.get("searchmode") == "raw"
-        # Or set search mode from the querystring
-        qs_searchmode = request.args.get("_searchmode")
-        if qs_searchmode == "escaped":
-            search_mode_raw = False
-        if qs_searchmode == "raw":
-            search_mode_raw = True
-
-        extra_context["supports_search"] = bool(fts_table)
-
-        if fts_table and search_args:
-            if "_search" in search_args:
-                # Simple ?_search=xxx
-                search = search_args["_search"]
-                where_clauses.append(
-                    "{fts_pk} in (select rowid from {fts_table} where {fts_table} match {match_clause})".format(
-                        fts_table=escape_sqlite(fts_table),
-                        fts_pk=escape_sqlite(fts_pk),
-                        match_clause=(
-                            ":search" if search_mode_raw else "escape_fts(:search)"
-                        ),
-                    )
-                )
-                human_descriptions.append(f'search matches "{search}"')
-                params["search"] = search
-                extra_context["search"] = search
-            else:
-                # More complex: search against specific columns
-                for i, (key, search_text) in enumerate(search_args.items()):
-                    search_col = key.split("_search_", 1)[1]
-                    if search_col not in await db.table_columns(fts_table):
-                        raise BadRequest("Cannot search by that column")
-
-                    where_clauses.append(
-                        "rowid in (select rowid from {fts_table} where {search_col} match {match_clause})".format(
-                            fts_table=escape_sqlite(fts_table),
-                            search_col=escape_sqlite(search_col),
-                            match_clause=(
-                                ":search_{}".format(i)
-                                if search_mode_raw
-                                else "escape_fts(:search_{})".format(i)
-                            ),
-                        )
-                    )
-                    human_descriptions.append(
-                        f'search column "{search_col}" matches "{search_text}"'
-                    )
-                    params[f"search_{i}"] = search_text
-                    extra_context["search"] = search_text
-
-        return FilterArguments(where_clauses, params, human_descriptions, extra_context)
-
-    return inner
-
-
-@hookimpl(specname="filters_from_request")
-def through_filters(request, database, table, datasette):
-    # ?_search= and _search_colname=
-    async def inner():
-        where_clauses = []
-        params = {}
-        human_descriptions = []
-        extra_context = {}
-
-        # Support for ?_through={table, column, value}
-        if "_through" in request.args:
-            for through in request.args.getlist("_through"):
-                through_data = json.loads(through)
-                through_table = through_data["table"]
-                other_column = through_data["column"]
-                value = through_data["value"]
-                db = datasette.get_database(database)
-                outgoing_foreign_keys = await db.foreign_keys_for_table(through_table)
-                try:
-                    fk_to_us = [
-                        fk for fk in outgoing_foreign_keys if fk["other_table"] == table
-                    ][0]
-                except IndexError:
-                    raise DatasetteError(
-                        "Invalid _through - could not find corresponding foreign key"
-                    )
-                param = f"p{len(params)}"
-                where_clauses.append(
-                    "{our_pk} in (select {our_column} from {through_table} where {other_column} = :{param})".format(
-                        through_table=escape_sqlite(through_table),
-                        our_pk=escape_sqlite(fk_to_us["other_column"]),
-                        our_column=escape_sqlite(fk_to_us["column"]),
-                        other_column=escape_sqlite(other_column),
-                        param=param,
-                    )
-                )
-                params[param] = value
-                human_descriptions.append(f'{through_table}.{other_column} = "{value}"')
-
-        return FilterArguments(where_clauses, params, human_descriptions, extra_context)
-
-    return inner
-
-
-class FilterArguments:
-    def __init__(
-        self, where_clauses, params=None, human_descriptions=None, extra_context=None
-    ):
-        self.where_clauses = where_clauses
-        self.params = params or {}
-        self.human_descriptions = human_descriptions or []
-        self.extra_context = extra_context or {}


 class Filter:
@@ -209,7 +43,7 @@ class TemplatedFilter(Filter):
             kwargs = {"c": column}
             converted = None
         else:
-            kwargs = {"c": column, "p": f"p{param_counter}", "t": table}
+            kwargs = {"c": column, "p": "p{}".format(param_counter), "t": table}
         return self.sql_template.format(**kwargs), converted

     def human_clause(self, column, value):
@@ -235,26 +69,12 @@ class InFilter(Filter):

     def where_clause(self, table, column, value, param_counter):
         values = self.split_value(value)
-        params = [f":p{param_counter + i}" for i in range(len(values))]
-        sql = f"{escape_sqlite(column)} in ({', '.join(params)})"
+        params = [":p{}".format(param_counter + i) for i in range(len(values))]
+        sql = "{} in ({})".format(escape_sqlite(column), ", ".join(params))
         return sql, values

     def human_clause(self, column, value):
-        return f"{column} in {json.dumps(self.split_value(value))}"
-
-
-class NotInFilter(InFilter):
-    key = "notin"
-    display = "not in"
-
-    def where_clause(self, table, column, value, param_counter):
-        values = self.split_value(value)
-        params = [f":p{param_counter + i}" for i in range(len(values))]
-        sql = f"{escape_sqlite(column)} not in ({', '.join(params)})"
-        return sql, values
-
-    def human_clause(self, column, value):
-        return f"{column} not in {json.dumps(self.split_value(value))}"
+        return "{} in {}".format(column, json.dumps(self.split_value(value)))


 class Filters:
@@ -280,13 +100,6 @@ class Filters:
             '{c} contains "{v}"',
             format="%{}%",
         ),
-        TemplatedFilter(
-            "notcontains",
-            "does not contain",
-            '"{c}" not like :{p}',
-            '{c} does not contain "{v}"',
-            format="%{}%",
-        ),
         TemplatedFilter(
             "endswith",
             "ends with",
@@ -310,27 +123,20 @@ class Filters:
             "lte", "\u2264", '"{c}" <= :{p}', "{c} \u2264 {v}", numeric=True
         ),
         TemplatedFilter("like", "like", '"{c}" like :{p}', '{c} like "{v}"'),
-        TemplatedFilter(
-            "notlike", "not like", '"{c}" not like :{p}', '{c} not like "{v}"'
-        ),
         TemplatedFilter("glob", "glob", '"{c}" glob :{p}', '{c} glob "{v}"'),
         InFilter(),
-        NotInFilter(),
     ]
     + (
         [
             TemplatedFilter(
                 "arraycontains",
                 "array contains",
-                """:{p} in (select value from json_each([{t}].[{c}]))""",
+                """rowid in (
+                    select {t}.rowid from {t}, json_each({t}.{c}) j
+                    where j.value = :{p}
+                )""",
                 '{c} contains "{v}"',
-            ),
-            TemplatedFilter(
-                "arraynotcontains",
-                "array does not contain",
-                """:{p} not in (select value from json_each([{t}].[{c}]))""",
-                '{c} does not contain "{v}"',
-            ),
+            )
         ]
         if detect_json1()
         else []
@@ -367,11 +173,13 @@ class Filters:
     )
     _filters_by_key = {f.key: f for f in _filters}

-    def __init__(self, pairs):
+    def __init__(self, pairs, units={}, ureg=None):
         self.pairs = pairs
+        self.units = units
+        self.ureg = ureg

     def lookups(self):
-        """Yields (lookup, display, no_argument) pairs"""
+        "Yields (lookup, display, no_argument) pairs"
         for filter in self._filters:
             yield filter.key, filter.display, filter.no_argument
@@ -393,10 +201,10 @@ class Filters:
         s = " and ".join(and_bits)
         if not s:
             return ""
-        return f"where {s}"
+        return "where {}".format(s)

     def selections(self):
-        """Yields (column, lookup, value) tuples"""
+        "Yields (column, lookup, value) tuples"
         for key, value in self.pairs:
             if "__" in key:
                 column, lookup = key.rsplit("__", 1)
@@ -408,6 +216,20 @@ class Filters:
     def has_selections(self):
         return bool(self.pairs)

+    def convert_unit(self, column, value):
+        "If the user has provided a unit in the query, convert it into the column unit, if present."
+        if column not in self.units:
+            return value
+
+        # Try to interpret the value as a unit
+        value = self.ureg(value)
+        if isinstance(value, numbers.Number):
+            # It's just a bare number, assume it's the column unit
+            return value
+
+        column_unit = self.ureg(self.units[column])
+        return value.to(column_unit).magnitude
+
     def build_where_clauses(self, table):
         sql_bits = []
         params = {}
@@ -415,13 +237,15 @@ class Filters:
         for column, lookup, value in self.selections():
             filter = self._filters_by_key.get(lookup, None)
             if filter:
-                sql_bit, param = filter.where_clause(table, column, value, i)
+                sql_bit, param = filter.where_clause(
+                    table, column, self.convert_unit(column, value), i
+                )
                 sql_bits.append(sql_bit)
                 if param is not None:
                     if not isinstance(param, list):
                         param = [param]
                     for individual_param in param:
-                        param_id = f"p{i}"
+                        param_id = "p{}".format(i)
                         params[param_id] = individual_param
                 i += 1
         return sql_bits, params
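A minimal sketch of how this class is driven, using illustrative column and table names; the `like` and `glob` templates are the ones registered above, and the default constructor arguments leave unit conversion inactive:

    filters = Filters([("name__like", "%sam%"), ("path__glob", "*.csv")])
    sql_bits, params = filters.build_where_clauses("files")
    # sql_bits -> ['"name" like :p0', '"path" glob :p1']
    # params   -> {"p0": "%sam%", "p1": "*.csv"}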
@@ -1,19 +0,0 @@
-from datasette import hookimpl, Response
-
-
-@hookimpl(trylast=True)
-def forbidden(datasette, request, message):
-    async def inner():
-        return Response.html(
-            await datasette.render_template(
-                "error.html",
-                {
-                    "title": "Forbidden",
-                    "error": message,
-                },
-                request=request,
-            ),
-            status=403,
-        )
-
-    return inner
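Because this default is registered with `trylast=True`, a plugin implementing the same hook takes precedence. A minimal sketch (the plain-text response is illustrative):

    from datasette import hookimpl, Response

    @hookimpl
    def forbidden(datasette, request, message):
        # Replace the error.html template with a bare 403 response
        return Response.text(message, status=403)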
@@ -1,77 +0,0 @@
-from datasette import hookimpl, Response
-from .utils import add_cors_headers
-from .utils.asgi import (
-    Base400,
-)
-from .views.base import DatasetteError
-from markupsafe import Markup
-import traceback
-
-try:
-    import ipdb as pdb
-except ImportError:
-    import pdb
-
-try:
-    import rich
-except ImportError:
-    rich = None
-
-
-@hookimpl(trylast=True)
-def handle_exception(datasette, request, exception):
-    async def inner():
-        if datasette.pdb:
-            pdb.post_mortem(exception.__traceback__)
-
-        if rich is not None:
-            rich.get_console().print_exception(show_locals=True)
-
-        title = None
-        if isinstance(exception, Base400):
-            status = exception.status
-            info = {}
-            message = exception.args[0]
-        elif isinstance(exception, DatasetteError):
-            status = exception.status
-            info = exception.error_dict
-            message = exception.message
-            if exception.message_is_html:
-                message = Markup(message)
-            title = exception.title
-        else:
-            status = 500
-            info = {}
-            message = str(exception)
-            traceback.print_exc()
-        templates = [f"{status}.html", "error.html"]
-        info.update(
-            {
-                "ok": False,
-                "error": message,
-                "status": status,
-                "title": title,
-            }
-        )
-        headers = {}
-        if datasette.cors:
-            add_cors_headers(headers)
-        if request.path.split("?")[0].endswith(".json"):
-            return Response.json(info, status=status, headers=headers)
-        else:
-            environment = datasette.get_jinja_environment(request)
-            template = environment.select_template(templates)
-            return Response.html(
-                await template.render_async(
-                    dict(
-                        info,
-                        urls=datasette.urls,
-                        app_css_hash=datasette.app_css_hash(),
-                        menu_links=lambda: [],
-                    )
-                ),
-                status=status,
-                headers=headers,
-            )
-
-    return inner
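A plugin can implement the same hook to intercept specific exception types before this default handler runs; per the hookspec, returning None falls through. A minimal sketch, with ValueError chosen purely for illustration:

    from datasette import hookimpl, Response

    @hookimpl
    def handle_exception(datasette, request, exception):
        # Turn ValueError into a structured 400 rather than a 500 page
        if isinstance(exception, ValueError):
            return Response.json({"ok": False, "error": str(exception)}, status=400)
        return None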
@@ -5,218 +5,51 @@ hookspec = HookspecMarker("datasette")
 hookimpl = HookimplMarker("datasette")


-@hookspec
-def startup(datasette):
-    """Fires directly after Datasette first starts running"""
-
-
 @hookspec
 def asgi_wrapper(datasette):
-    """Returns an ASGI middleware callable to wrap our ASGI application with"""
+    "Returns an ASGI middleware callable to wrap our ASGI application with"


 @hookspec
-def prepare_connection(conn, database, datasette):
-    """Modify SQLite connection in some way e.g. register custom SQL functions"""
+def prepare_connection(conn):
+    "Modify SQLite connection in some way e.g. register custom SQL functions"


 @hookspec
-def prepare_jinja2_environment(env, datasette):
-    """Modify Jinja2 template environment e.g. register custom template tags"""
+def prepare_jinja2_environment(env):
+    "Modify Jinja2 template environment e.g. register custom template tags"


 @hookspec
-def extra_css_urls(template, database, table, columns, view_name, request, datasette):
-    """Extra CSS URLs added by this plugin"""
+def extra_css_urls(template, database, table, datasette):
+    "Extra CSS URLs added by this plugin"


 @hookspec
-def extra_js_urls(template, database, table, columns, view_name, request, datasette):
-    """Extra JavaScript URLs added by this plugin"""
+def extra_js_urls(template, database, table, datasette):
+    "Extra JavaScript URLs added by this plugin"


 @hookspec
-def extra_body_script(
-    template, database, table, columns, view_name, request, datasette
-):
-    """Extra JavaScript code to be included in <script> at bottom of body"""
-
-
-@hookspec
-def extra_template_vars(
-    template, database, table, columns, view_name, request, datasette
-):
-    """Extra template variables to be made available to the template - can return dict or callable or awaitable"""
+def extra_body_script(template, database, table, view_name, datasette):
+    "Extra JavaScript code to be included in <script> at bottom of body"


 @hookspec
 def publish_subcommand(publish):
-    """Subcommands for 'datasette publish'"""
+    "Subcommands for 'datasette publish'"


-@hookspec
-def render_cell(row, value, column, table, database, datasette, request):
-    """Customize rendering of HTML table cell values"""
+@hookspec(firstresult=True)
+def render_cell(value, column, table, database, datasette):
+    "Customize rendering of HTML table cell values"


 @hookspec
 def register_output_renderer(datasette):
-    """Register a renderer to output data in a different format"""
+    "Register a renderer to output data in a different format"


 @hookspec
 def register_facet_classes():
-    """Register Facet subclasses"""
-
-
-@hookspec
-def register_actions(datasette):
-    """Register actions: returns a list of datasette.permission.Action objects"""
-
-
-@hookspec
-def register_routes(datasette):
-    """Register URL routes: return a list of (regex, view_function) pairs"""
-
-
-@hookspec
-def register_commands(cli):
-    """Register additional CLI commands, e.g. 'datasette mycommand ...'"""
-
-
-@hookspec
-def actor_from_request(datasette, request):
-    """Return an actor dictionary based on the incoming request"""
-
-
-@hookspec(firstresult=True)
-def actors_from_ids(datasette, actor_ids):
-    """Returns a dictionary mapping those IDs to actor dictionaries"""
-
-
-@hookspec
-def jinja2_environment_from_request(datasette, request, env):
-    """Return a Jinja2 environment based on the incoming request"""
-
-
-@hookspec
-def filters_from_request(request, database, table, datasette):
-    """
-    Return datasette.filters.FilterArguments(
-        where_clauses=[str, str, str],
-        params={},
-        human_descriptions=[str, str, str],
-        extra_context={}
-    ) based on the request"""
-
-
-@hookspec
-def permission_resources_sql(datasette, actor, action):
-    """Return SQL query fragments for permission checks on resources.
-
-    Returns None, a PermissionSQL object, or a list of PermissionSQL objects.
-    Each PermissionSQL contains SQL that should return rows with columns:
-    parent (str|None), child (str|None), allow (int), reason (str).
-
-    Used to efficiently check permissions across multiple resources at once.
-    """
-
-
-@hookspec
-def canned_queries(datasette, database, actor):
-    """Return a dictionary of canned query definitions or an awaitable function that returns them"""
-
-
-@hookspec
-def register_magic_parameters(datasette):
-    """Return a list of (name, function) magic parameter functions"""
-
-
-@hookspec
-def forbidden(datasette, request, message):
-    """Custom response for a 403 forbidden error"""
-
-
-@hookspec
-def menu_links(datasette, actor, request):
-    """Links for the navigation menu"""
-
-
-@hookspec
-def row_actions(datasette, actor, request, database, table, row):
-    """Links for the row actions menu"""
-
-
-@hookspec
-def table_actions(datasette, actor, database, table, request):
-    """Links for the table actions menu"""
-
-
-@hookspec
-def view_actions(datasette, actor, database, view, request):
-    """Links for the view actions menu"""
-
-
-@hookspec
-def query_actions(datasette, actor, database, query_name, request, sql, params):
-    """Links for the query and canned query actions menu"""
-
-
-@hookspec
-def database_actions(datasette, actor, database, request):
-    """Links for the database actions menu"""
-
-
-@hookspec
-def homepage_actions(datasette, actor, request):
-    """Links for the homepage actions menu"""
-
-
-@hookspec
-def skip_csrf(datasette, scope):
-    """Mechanism for skipping CSRF checks for certain requests"""
-
-
-@hookspec
-def handle_exception(datasette, request, exception):
-    """Handle an uncaught exception. Can return a Response or None."""
-
-
-@hookspec
-def track_event(datasette, event):
-    """Respond to an event tracked by Datasette"""
-
-
-@hookspec
-def register_events(datasette):
-    """Return a list of Event subclasses to use with track_event()"""
-
-
-@hookspec
-def top_homepage(datasette, request):
-    """HTML to include at the top of the homepage"""
-
-
-@hookspec
-def top_database(datasette, request, database):
-    """HTML to include at the top of the database page"""
-
-
-@hookspec
-def top_table(datasette, request, database, table):
-    """HTML to include at the top of the table page"""
-
-
-@hookspec
-def top_row(datasette, request, database, table, row):
-    """HTML to include at the top of the row page"""
-
-
-@hookspec
-def top_query(datasette, request, database, sql):
-    """HTML to include at the top of the query results page"""
-
-
-@hookspec
-def top_canned_query(datasette, request, database, query_name):
-    """HTML to include at the top of the canned query page"""
+    "Register Facet subclasses"
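For reference, a minimal plugin implementing one of these hooks; this sketch targets the newer `prepare_connection(conn, database, datasette)` signature on the left-hand side of the diff:

    from datasette import hookimpl
    import random

    @hookimpl
    def prepare_connection(conn, database, datasette):
        # Register select random_integer(low, high) as a custom SQL function
        conn.create_function("random_integer", 2, lambda lo, hi: random.randint(lo, hi))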
@@ -15,7 +15,7 @@ HASH_BLOCK_SIZE = 1024 * 1024


 def inspect_hash(path):
-    """Calculate the hash of a database, efficiently."""
+    " Calculate the hash of a database, efficiently. "
     m = hashlib.sha256()
     with path.open("rb") as fp:
         while True:
@@ -28,14 +28,14 @@ def inspect_hash(path):


 def inspect_views(conn):
-    """List views in a database."""
+    " List views in a database. "
     return [
         v[0] for v in conn.execute('select name from sqlite_master where type = "view"')
     ]


 def inspect_tables(conn, database_metadata):
-    """List tables and their row counts, excluding uninteresting tables."""
+    " List tables and their row counts, excluding uninteresting tables. "
     tables = {}
     table_names = [
         r["name"]
@@ -47,7 +47,7 @@ def inspect_tables(conn, database_metadata):

         try:
             count = conn.execute(
-                f"select count(*) from {escape_sqlite(table)}"
+                "select count(*) from {}".format(escape_sqlite(table))
             ).fetchone()[0]
         except sqlite3.OperationalError:
             # This can happen when running against a FTS virtual table
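A usage sketch for `inspect_hash`; given the `hashlib.sha256()` setup above, the assumption is that it finishes by returning the hex digest, and `fixtures.db` is an illustrative filename:

    from pathlib import Path

    digest = inspect_hash(Path("fixtures.db"))
    print(digest)  # sha256 of the database file, read in HASH_BLOCK_SIZE chunks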
@@ -1,210 +0,0 @@
-from abc import ABC, abstractmethod
-from dataclasses import dataclass
-from typing import Any, NamedTuple
-import contextvars
-
-
-# Context variable to track when permission checks should be skipped
-_skip_permission_checks = contextvars.ContextVar(
-    "skip_permission_checks", default=False
-)
-
-
-class SkipPermissions:
-    """Context manager to temporarily skip permission checks.
-
-    This is not a stable API and may change in future releases.
-
-    Usage:
-        with SkipPermissions():
-            # Permission checks are skipped within this block
-            response = await datasette.client.get("/protected")
-    """
-
-    def __enter__(self):
-        self.token = _skip_permission_checks.set(True)
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        _skip_permission_checks.reset(self.token)
-        return False
-
-
-class Resource(ABC):
-    """
-    Base class for all resource types.
-
-    Each subclass represents a type of resource (e.g., TableResource, DatabaseResource).
-    The class itself carries metadata about the resource type.
-    Instances represent specific resources.
-    """
-
-    # Class-level metadata (subclasses must define these)
-    name: str = None  # e.g., "table", "database", "model"
-    parent_class: type["Resource"] | None = None  # e.g., DatabaseResource for tables
-
-    # Instance-level optional extra attributes
-    reasons: list[str] | None = None
-    include_reasons: bool | None = None
-
-    def __init__(self, parent: str | None = None, child: str | None = None):
-        """
-        Create a resource instance.
-
-        Args:
-            parent: The parent identifier (meaning depends on resource type)
-            child: The child identifier (meaning depends on resource type)
-        """
-        self.parent = parent
-        self.child = child
-        self._private = None  # Sentinel to track if private was set
-
-    @property
-    def private(self) -> bool:
-        """
-        Whether this resource is private (accessible to actor but not anonymous).
-
-        This property is only available on Resource objects returned from
-        allowed_resources() when include_is_private=True is used.
-
-        Raises:
-            AttributeError: If accessed without calling include_is_private=True
-        """
-        if self._private is None:
-            raise AttributeError(
-                "The 'private' attribute is only available when using "
-                "allowed_resources(..., include_is_private=True)"
-            )
-        return self._private
-
-    @private.setter
-    def private(self, value: bool):
-        self._private = value
-
-    @classmethod
-    def __init_subclass__(cls):
-        """
-        Validate resource hierarchy doesn't exceed 2 levels.
-
-        Raises:
-            ValueError: If this resource would create a 3-level hierarchy
-        """
-        super().__init_subclass__()
-
-        if cls.parent_class is None:
-            return  # Top of hierarchy, nothing to validate
-
-        # Check if our parent has a parent - that would create 3 levels
-        if cls.parent_class.parent_class is not None:
-            # We have a parent, and that parent has a parent
-            # This creates a 3-level hierarchy, which is not allowed
-            raise ValueError(
-                f"Resource {cls.__name__} creates a 3-level hierarchy: "
-                f"{cls.parent_class.parent_class.__name__} -> {cls.parent_class.__name__} -> {cls.__name__}. "
-                f"Maximum 2 levels allowed (parent -> child)."
-            )
-
-    @classmethod
-    @abstractmethod
-    def resources_sql(cls) -> str:
-        """
-        Return SQL query that returns all resources of this type.
-
-        Must return two columns: parent, child
-        """
-        pass
-
-
-class AllowedResource(NamedTuple):
-    """A resource with the reason it was allowed (for debugging)."""
-
-    resource: Resource
-    reason: str
-
-
-@dataclass(frozen=True, kw_only=True)
-class Action:
-    name: str
-    description: str | None
-    abbr: str | None = None
-    resource_class: type[Resource] | None = None
-    also_requires: str | None = None  # Optional action name that must also be allowed
-
-    @property
-    def takes_parent(self) -> bool:
-        """
-        Whether this action requires a parent identifier when instantiating its resource.
-
-        Returns False for global-only actions (no resource_class).
-        Returns True for all actions with a resource_class (all resources require a parent identifier).
-        """
-        return self.resource_class is not None
-
-    @property
-    def takes_child(self) -> bool:
-        """
-        Whether this action requires a child identifier when instantiating its resource.
-
-        Returns False for global actions (no resource_class).
-        Returns False for parent-level resources (DatabaseResource - parent_class is None).
-        Returns True for child-level resources (TableResource, QueryResource - have a parent_class).
-        """
-        if self.resource_class is None:
-            return False
-        return self.resource_class.parent_class is not None
-
-
-_reason_id = 1
-
-
-@dataclass
-class PermissionSQL:
-    """
-    A plugin contributes SQL that yields:
-        parent TEXT NULL,
-        child TEXT NULL,
-        allow INTEGER,   -- 1 allow, 0 deny
-        reason TEXT
-
-    For restriction-only plugins, sql can be None and only restriction_sql is provided.
-    """
-
-    sql: str | None = (
-        None  # SQL that SELECTs the 4 columns above (can be None for restriction-only)
-    )
-    params: dict[str, Any] | None = (
-        None  # bound params for the SQL (values only; no ':' prefix)
-    )
-    source: str | None = None  # System will set this to the plugin name
-    restriction_sql: str | None = (
-        None  # Optional SQL that returns (parent, child) for restriction filtering
-    )
-
-    @classmethod
-    def allow(cls, reason: str, _allow: bool = True) -> "PermissionSQL":
-        global _reason_id
-        i = _reason_id
-        _reason_id += 1
-        return cls(
-            sql=f"SELECT NULL AS parent, NULL AS child, {1 if _allow else 0} AS allow, :reason_{i} AS reason",
-            params={f"reason_{i}": reason},
-        )
-
-    @classmethod
-    def deny(cls, reason: str) -> "PermissionSQL":
-        return cls.allow(reason=reason, _allow=False)
-
-
-# This is obsolete, replaced by Action and ResourceType
-@dataclass
-class Permission:
-    name: str
-    abbr: str | None
-    description: str | None
-    takes_database: bool
-    takes_resource: bool
-    default: bool
-    # This is deliberately undocumented: it's considered an internal
-    # implementation detail for view-table/view-database and should
-    # not be used by plugins as it may change in the future.
-    implies_can_view: bool = False
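A minimal sketch of a `permission_resources_sql` implementation built on the `PermissionSQL` helpers above; the import path and the root-actor rule are illustrative assumptions, not confirmed by this diff:

    from datasette import hookimpl
    from datasette.permissions import PermissionSQL  # assumed module path

    @hookimpl
    def permission_resources_sql(datasette, actor, action):
        # Blanket-allow everything for the root actor, with a recorded reason
        if actor and actor.get("id") == "root":
            return PermissionSQL.allow("root actor is allowed everything")
        return None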
@@ -1,124 +1,23 @@
 import importlib
-import os
 import pluggy
-from pprint import pprint
 import sys
 from . import hookspecs

-if sys.version_info >= (3, 9):
-    import importlib.resources as importlib_resources
-else:
-    import importlib_resources
-if sys.version_info >= (3, 10):
-    import importlib.metadata as importlib_metadata
-else:
-    import importlib_metadata
-
-
 DEFAULT_PLUGINS = (
     "datasette.publish.heroku",
+    "datasette.publish.now",
     "datasette.publish.cloudrun",
     "datasette.facets",
-    "datasette.filters",
-    "datasette.sql_functions",
-    "datasette.actor_auth_cookie",
-    "datasette.default_permissions",
-    "datasette.default_actions",
-    "datasette.default_magic_parameters",
-    "datasette.blob_renderer",
-    "datasette.default_menu_links",
-    "datasette.handle_exception",
-    "datasette.forbidden",
-    "datasette.events",
 )

 pm = pluggy.PluginManager("datasette")
 pm.add_hookspecs(hookspecs)

-DATASETTE_TRACE_PLUGINS = os.environ.get("DATASETTE_TRACE_PLUGINS", None)
-
-
-def before(hook_name, hook_impls, kwargs):
-    print(file=sys.stderr)
-    print(f"{hook_name}:", file=sys.stderr)
-    pprint(kwargs, width=40, indent=4, stream=sys.stderr)
-    print("Hook implementations:", file=sys.stderr)
-    pprint(hook_impls, width=40, indent=4, stream=sys.stderr)
-
-
-def after(outcome, hook_name, hook_impls, kwargs):
-    results = outcome.get_result()
-    if not isinstance(results, list):
-        results = [results]
-    print("Results:", file=sys.stderr)
-    pprint(results, width=40, indent=4, stream=sys.stderr)
-
-
-if DATASETTE_TRACE_PLUGINS:
-    pm.add_hookcall_monitoring(before, after)
-
-
-DATASETTE_LOAD_PLUGINS = os.environ.get("DATASETTE_LOAD_PLUGINS", None)
-
-if not hasattr(sys, "_called_from_test") and DATASETTE_LOAD_PLUGINS is None:
+if not hasattr(sys, "_called_from_test"):
     # Only load plugins if not running tests
     pm.load_setuptools_entrypoints("datasette")

-# Load any plugins specified in DATASETTE_LOAD_PLUGINS")
-if DATASETTE_LOAD_PLUGINS is not None:
-    for package_name in [
-        name for name in DATASETTE_LOAD_PLUGINS.split(",") if name.strip()
-    ]:
-        try:
-            distribution = importlib_metadata.distribution(package_name)
-            entry_points = distribution.entry_points
-            for entry_point in entry_points:
-                if entry_point.group == "datasette":
-                    mod = entry_point.load()
-                    pm.register(mod, name=entry_point.name)
-                    # Ensure name can be found in plugin_to_distinfo later:
-                    pm._plugin_distinfo.append((mod, distribution))
-        except importlib_metadata.PackageNotFoundError:
-            sys.stderr.write("Plugin {} could not be found\n".format(package_name))
-
-
 # Load default plugins
 for plugin in DEFAULT_PLUGINS:
     mod = importlib.import_module(plugin)
     pm.register(mod, plugin)
-
-
-def get_plugins():
-    plugins = []
-    plugin_to_distinfo = dict(pm.list_plugin_distinfo())
-    for plugin in pm.get_plugins():
-        static_path = None
-        templates_path = None
-        plugin_name = (
-            plugin.__name__
-            if hasattr(plugin, "__name__")
-            else plugin.__class__.__name__
-        )
-        if plugin_name not in DEFAULT_PLUGINS:
-            try:
-                if (importlib_resources.files(plugin_name) / "static").is_dir():
-                    static_path = str(importlib_resources.files(plugin_name) / "static")
-                if (importlib_resources.files(plugin_name) / "templates").is_dir():
-                    templates_path = str(
-                        importlib_resources.files(plugin_name) / "templates"
-                    )
-            except (TypeError, ModuleNotFoundError):
-                # Caused by --plugins_dir= plugins
-                pass
-        plugin_info = {
-            "name": plugin_name,
-            "static_path": static_path,
-            "templates_path": templates_path,
-            "hooks": [h.name for h in pm.get_hookcallers(plugin)],
-        }
-        distinfo = plugin_to_distinfo.get(plugin)
-        if distinfo:
-            plugin_info["version"] = distinfo.version
-            plugin_info["name"] = distinfo.name or distinfo.project_name
-        plugins.append(plugin_info)
-    return plugins
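For reference, `get_plugins()` returns one dictionary per registered plugin; a sketch of the shape for a hypothetical third-party plugin (the plugin name, hook list and version are illustrative values):

    [
        {
            "name": "datasette-cluster-map",
            "static_path": None,
            "templates_path": None,
            "hooks": ["extra_js_urls"],
            "version": "0.17",
        }
    ]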
@@ -1,9 +1,7 @@
 from datasette import hookimpl
 import click
 import json
-import os
-import re
-from subprocess import CalledProcessError, check_call, check_output
+from subprocess import check_call, check_output

 from .common import (
     add_common_publish_arguments_and_options,
@@ -23,66 +21,9 @@ def publish_subcommand(publish):
         help="Application name to use when building",
     )
     @click.option(
-        "--service",
-        default="",
-        help="Cloud Run service to deploy (or over-write)",
+        "--service", default="", help="Cloud Run service to deploy (or over-write)"
     )
     @click.option("--spatialite", is_flag=True, help="Enable SpatialLite extension")
-    @click.option(
-        "--show-files",
-        is_flag=True,
-        help="Output the generated Dockerfile and metadata.json",
-    )
-    @click.option(
-        "--memory",
-        callback=_validate_memory,
-        help="Memory to allocate in Cloud Run, e.g. 1Gi",
-    )
-    @click.option(
-        "--cpu",
-        type=click.Choice(["1", "2", "4"]),
-        help="Number of vCPUs to allocate in Cloud Run",
-    )
-    @click.option(
-        "--timeout",
-        type=int,
-        help="Build timeout in seconds",
-    )
-    @click.option(
-        "--apt-get-install",
-        "apt_get_extras",
-        multiple=True,
-        help="Additional packages to apt-get install",
-    )
-    @click.option(
-        "--max-instances",
-        type=int,
-        default=1,
-        show_default=True,
-        help="Maximum Cloud Run instances (use 0 to remove the limit)",
-    )
-    @click.option(
-        "--min-instances",
-        type=int,
-        help="Minimum Cloud Run instances",
-    )
-    @click.option(
-        "--artifact-repository",
-        default="datasette",
-        show_default=True,
-        help="Artifact Registry repository to store the image",
-    )
-    @click.option(
-        "--artifact-region",
-        default="us",
-        show_default=True,
-        help="Artifact Registry location (region or multi-region)",
-    )
-    @click.option(
-        "--artifact-project",
-        default=None,
-        help="Project ID for Artifact Registry (defaults to the active project)",
-    )
     def cloudrun(
         files,
         metadata,
@@ -92,9 +33,7 @@ def publish_subcommand(publish):
         plugins_dir,
         static,
         install,
-        plugin_secret,
         version_note,
-        secret,
         title,
         license,
         license_url,
@@ -105,18 +44,7 @@ def publish_subcommand(publish):
         name,
         service,
         spatialite,
-        show_files,
-        memory,
-        cpu,
-        timeout,
-        apt_get_extras,
-        max_instances,
-        min_instances,
-        artifact_repository,
-        artifact_region,
-        artifact_project,
     ):
-        "Publish databases to Datasette running on Cloud Run"
         fail_if_publish_binary_not_installed(
             "gcloud", "Google Cloud", "https://cloud.google.com/sdk/"
         )
@@ -124,72 +52,6 @@ def publish_subcommand(publish):
             "gcloud config get-value project", shell=True, universal_newlines=True
         ).strip()

-        artifact_project = artifact_project or project
-
-        # Ensure Artifact Registry exists for the target image
-        _ensure_artifact_registry(
-            artifact_project=artifact_project,
-            artifact_region=artifact_region,
-            artifact_repository=artifact_repository,
-        )
-
-        artifact_host = (
-            artifact_region
-            if artifact_region.endswith("-docker.pkg.dev")
-            else f"{artifact_region}-docker.pkg.dev"
-        )
-
-        if not service:
-            # Show the user their current services, then prompt for one
-            click.echo("Please provide a service name for this deployment\n")
-            click.echo("Using an existing service name will over-write it")
-            click.echo("")
-            existing_services = get_existing_services()
-            if existing_services:
-                click.echo("Your existing services:\n")
-                for existing_service in existing_services:
-                    click.echo(
-                        "  {name} - created {created} - {url}".format(
-                            **existing_service
-                        )
-                    )
-                click.echo("")
-            service = click.prompt("Service name", type=str)
-
-        image_id = (
-            f"{artifact_host}/{artifact_project}/"
-            f"{artifact_repository}/datasette-{service}"
-        )
-
-        extra_metadata = {
-            "title": title,
-            "license": license,
-            "license_url": license_url,
-            "source": source,
-            "source_url": source_url,
-            "about": about,
-            "about_url": about_url,
-        }
-
-        if not extra_options:
-            extra_options = ""
-        if "force_https_urls" not in extra_options:
-            if extra_options:
-                extra_options += " "
-            extra_options += "--setting force_https_urls on"
-
-        environment_variables = {}
-        if plugin_secret:
-            extra_metadata["plugins"] = {}
-            for plugin_name, plugin_setting, setting_value in plugin_secret:
-                environment_variable = (
-                    f"{plugin_name}_{plugin_setting}".upper().replace("-", "_")
-                )
-                environment_variables[environment_variable] = setting_value
-                extra_metadata["plugins"].setdefault(plugin_name, {})[
-                    plugin_setting
-                ] = {"$env": environment_variable}
-
         with temporary_docker_directory(
             files,
             name,
@@ -202,112 +64,21 @@ def publish_subcommand(publish):
             install,
             spatialite,
             version_note,
-            secret,
-            extra_metadata,
-            environment_variables,
-            apt_get_extras=apt_get_extras,
+            {
+                "title": title,
+                "license": license,
+                "license_url": license_url,
+                "source": source,
+                "source_url": source_url,
+                "about": about,
+                "about_url": about_url,
+            },
         ):
-            if show_files:
-                if os.path.exists("metadata.json"):
-                    print("=== metadata.json ===\n")
-                    with open("metadata.json") as fp:
-                        print(fp.read())
-                print("\n==== Dockerfile ====\n")
-                with open("Dockerfile") as fp:
-                    print(fp.read())
-                print("\n====================\n")
-
-            check_call(
-                "gcloud builds submit --tag {}{}".format(
-                    image_id, " --timeout {}".format(timeout) if timeout else ""
-                ),
-                shell=True,
-            )
-            extra_deploy_options = []
-            for option, value in (
-                ("--memory", memory),
-                ("--cpu", cpu),
-                ("--max-instances", max_instances),
-                ("--min-instances", min_instances),
-            ):
-                if value is not None:
-                    extra_deploy_options.append("{} {}".format(option, value))
+            image_id = "gcr.io/{project}/{name}".format(project=project, name=name)
+            check_call("gcloud builds submit --tag {}".format(image_id), shell=True)
             check_call(
-                "gcloud run deploy --allow-unauthenticated --platform=managed --image {} {}{}".format(
-                    image_id,
-                    service,
-                    " " + " ".join(extra_deploy_options) if extra_deploy_options else "",
+                "gcloud beta run deploy --allow-unauthenticated --image {}{}".format(
                    image_id, " {}".format(service) if service else ""
                 ),
                 shell=True,
             )
-
-
-def _ensure_artifact_registry(artifact_project, artifact_region, artifact_repository):
-    """Ensure Artifact Registry API is enabled and the repository exists."""
-
-    enable_cmd = (
-        "gcloud services enable artifactregistry.googleapis.com "
-        f"--project {artifact_project} --quiet"
-    )
-    try:
-        check_call(enable_cmd, shell=True)
-    except CalledProcessError as exc:
|
|
||||||
raise click.ClickException(
|
|
||||||
"Failed to enable artifactregistry.googleapis.com. "
|
|
||||||
"Please ensure you have permissions to manage services."
|
|
||||||
) from exc
|
|
||||||
|
|
||||||
describe_cmd = (
|
|
||||||
"gcloud artifacts repositories describe {repo} --project {project} "
|
|
||||||
"--location {location} --quiet"
|
|
||||||
).format(
|
|
||||||
repo=artifact_repository,
|
|
||||||
project=artifact_project,
|
|
||||||
location=artifact_region,
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
check_call(describe_cmd, shell=True)
|
|
||||||
return
|
|
||||||
except CalledProcessError:
|
|
||||||
create_cmd = (
|
|
||||||
"gcloud artifacts repositories create {repo} --repository-format=docker "
|
|
||||||
'--location {location} --project {project} --description "Datasette Cloud Run images" --quiet'
|
|
||||||
).format(
|
|
||||||
repo=artifact_repository,
|
|
||||||
location=artifact_region,
|
|
||||||
project=artifact_project,
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
check_call(create_cmd, shell=True)
|
|
||||||
click.echo(f"Created Artifact Registry repository '{artifact_repository}'")
|
|
||||||
except CalledProcessError as exc:
|
|
||||||
raise click.ClickException(
|
|
||||||
"Failed to create Artifact Registry repository. "
|
|
||||||
"Use --artifact-repository/--artifact-region to point to an existing repo "
|
|
||||||
"or create one manually."
|
|
||||||
) from exc
|
|
||||||
|
|
||||||
|
|
||||||
def get_existing_services():
|
|
||||||
services = json.loads(
|
|
||||||
check_output(
|
|
||||||
"gcloud run services list --platform=managed --format json",
|
|
||||||
shell=True,
|
|
||||||
universal_newlines=True,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
return [
|
|
||||||
{
|
|
||||||
"name": service["metadata"]["name"],
|
|
||||||
"created": service["metadata"]["creationTimestamp"],
|
|
||||||
"url": service["status"]["address"]["url"],
|
|
||||||
}
|
|
||||||
for service in services
|
|
||||||
if "url" in service["status"]
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
def _validate_memory(ctx, param, value):
|
|
||||||
if value and re.match(r"^\d+(Gi|G|Mi|M)$", value) is None:
|
|
||||||
raise click.BadParameter("--memory should be a number then Gi/G/Mi/M e.g 1Gi")
|
|
||||||
return value
|
|
||||||
|
|
|
||||||
|
|
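The removed cloudrun code above follows a describe-then-create pattern against Artifact Registry and composes image names from the registry host, project, repository and service. A minimal standalone sketch of that pattern, not part of the diff; the project, region, repository and service values below are invented placeholders:

from subprocess import CalledProcessError, check_call


def ensure_artifact_repo(project, region, repo):
    # Registry hosts look like REGION-docker.pkg.dev unless a full host was given
    host = region if region.endswith("-docker.pkg.dev") else f"{region}-docker.pkg.dev"
    try:
        # Succeeds (exit code 0) only if the repository already exists
        check_call(
            f"gcloud artifacts repositories describe {repo} "
            f"--project {project} --location {region} --quiet",
            shell=True,
        )
    except CalledProcessError:
        # Otherwise create it, mirroring the fallback in _ensure_artifact_registry
        check_call(
            f"gcloud artifacts repositories create {repo} --repository-format=docker "
            f"--project {project} --location {region} --quiet",
            shell=True,
        )
    # Image names then take the form HOST/PROJECT/REPOSITORY/IMAGE
    return f"{host}/{project}/{repo}/datasette-my-service"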
datasette/publish/common.py
@@ -1,6 +1,5 @@
 from ..utils import StaticMount
 import click
-import os
 import shutil
 import sys
 
@@ -13,13 +12,13 @@ def add_common_publish_arguments_and_options(subcommand):
                "-m",
                "--metadata",
                type=click.File(mode="r"),
-                help="Path to JSON/YAML file containing metadata to publish",
+                help="Path to JSON file containing metadata to publish",
            ),
            click.option(
                "--extra-options", help="Extra options to pass to datasette serve"
            ),
            click.option(
-                "--branch", help="Install datasette from a GitHub branch e.g. main"
+                "--branch", help="Install datasette from a GitHub branch e.g. master"
            ),
            click.option(
                "--template-dir",
@@ -34,7 +33,7 @@ def add_common_publish_arguments_and_options(subcommand):
            click.option(
                "--static",
                type=StaticMount(),
-                help="Serve static files from this directory at /MOUNT/...",
+                help="mountpoint:path-to-directory for serving static files",
                multiple=True,
            ),
            click.option(
@@ -42,23 +41,9 @@ def add_common_publish_arguments_and_options(subcommand):
                help="Additional packages (e.g. plugins) to install",
                multiple=True,
            ),
-            click.option(
-                "--plugin-secret",
-                nargs=3,
-                type=(str, str, str),
-                callback=validate_plugin_secret,
-                multiple=True,
-                help="Secrets to pass to plugins, e.g. --plugin-secret datasette-auth-github client_id xxx",
-            ),
            click.option(
                "--version-note", help="Additional note to show on /-/versions"
            ),
-            click.option(
-                "--secret",
-                help="Secret used for signing secure values, such as signed cookies",
-                envvar="DATASETTE_PUBLISH_SECRET",
-                default=lambda: os.urandom(32).hex(),
-            ),
            click.option("--title", help="Title for metadata"),
            click.option("--license", help="License label for metadata"),
            click.option("--license_url", help="License URL for metadata"),
@@ -85,14 +70,9 @@ def fail_if_publish_binary_not_installed(binary, publish_target, install_link):
            err=True,
        )
        click.echo(
-            f"Follow the instructions at {install_link}",
+            "Follow the instructions at {install_link}".format(
+                install_link=install_link
+            ),
            err=True,
        )
        sys.exit(1)
-
-
-def validate_plugin_secret(ctx, param, value):
-    for plugin_name, plugin_setting, setting_value in value:
-        if "'" in setting_value:
-            raise click.BadParameter("--plugin-secret cannot contain single quotes")
-    return value
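The --plugin-secret plumbing removed above (here and in the cloudrun/heroku hunks) threads each (plugin, setting, value) triple through an environment variable plus a {"$env": ...} reference in metadata. A small self-contained illustration of that mapping, using an invented secret value:

plugin_secret = [("datasette-auth-github", "client_id", "xxx")]  # made-up triple

environment_variables = {}
metadata = {"plugins": {}}
for plugin_name, plugin_setting, setting_value in plugin_secret:
    # datasette-auth-github / client_id -> DATASETTE_AUTH_GITHUB_CLIENT_ID
    env_var = f"{plugin_name}_{plugin_setting}".upper().replace("-", "_")
    environment_variables[env_var] = setting_value
    metadata["plugins"].setdefault(plugin_name, {})[plugin_setting] = {"$env": env_var}

assert environment_variables == {"DATASETTE_AUTH_GITHUB_CLIENT_ID": "xxx"}
assert metadata["plugins"]["datasette-auth-github"]["client_id"] == {
    "$env": "DATASETTE_AUTH_GITHUB_CLIENT_ID"
}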
datasette/publish/heroku.py
@@ -3,9 +3,7 @@ from datasette import hookimpl
 import click
 import json
 import os
-import pathlib
 import shlex
-import shutil
 from subprocess import call, check_output
 import tempfile
 
@@ -13,7 +11,7 @@ from .common import (
     add_common_publish_arguments_and_options,
     fail_if_publish_binary_not_installed,
 )
-from datasette.utils import link_or_copy, link_or_copy_directory, parse_metadata
+from datasette.utils import link_or_copy, link_or_copy_directory
 
 
 @hookimpl
@@ -26,15 +24,6 @@ def publish_subcommand(publish):
         default="datasette",
         help="Application name to use when deploying",
     )
-    @click.option(
-        "--tar",
-        help="--tar option to pass to Heroku, e.g. --tar=/usr/local/bin/gtar",
-    )
-    @click.option(
-        "--generate-dir",
-        type=click.Path(dir_okay=True, file_okay=False),
-        help="Output generated application files and stop without deploying",
-    )
     def heroku(
         files,
         metadata,
@@ -44,9 +33,7 @@ def publish_subcommand(publish):
         plugins_dir,
         static,
         install,
-        plugin_secret,
         version_note,
-        secret,
         title,
         license,
         license_url,
@@ -55,10 +42,7 @@ def publish_subcommand(publish):
         about,
         about_url,
         name,
-        tar,
-        generate_dir,
     ):
-        "Publish databases to Datasette running on Heroku"
         fail_if_publish_binary_not_installed(
             "heroku", "Heroku", "https://cli.heroku.com"
         )
@@ -77,28 +61,6 @@ def publish_subcommand(publish):
         )
         call(["heroku", "plugins:install", "heroku-builds"])
 
-        extra_metadata = {
-            "title": title,
-            "license": license,
-            "license_url": license_url,
-            "source": source,
-            "source_url": source_url,
-            "about": about,
-            "about_url": about_url,
-        }
-
-        environment_variables = {}
-        if plugin_secret:
-            extra_metadata["plugins"] = {}
-            for plugin_name, plugin_setting, setting_value in plugin_secret:
-                environment_variable = (
-                    f"{plugin_name}_{plugin_setting}".upper().replace("-", "_")
-                )
-                environment_variables[environment_variable] = setting_value
-                extra_metadata["plugins"].setdefault(plugin_name, {})[
-                    plugin_setting
-                ] = {"$env": environment_variable}
-
         with temporary_heroku_directory(
             files,
             name,
@@ -110,19 +72,16 @@ def publish_subcommand(publish):
             static,
             install,
             version_note,
-            secret,
-            extra_metadata,
+            {
+                "title": title,
+                "license": license,
+                "license_url": license_url,
+                "source": source,
+                "source_url": source_url,
+                "about": about,
+                "about_url": about_url,
+            },
         ):
-            if generate_dir:
-                # Recursively copy files from current working directory to it
-                if pathlib.Path(generate_dir).exists():
-                    raise click.ClickException("Directory already exists")
-                shutil.copytree(".", generate_dir)
-                click.echo(
-                    f"Generated files written to {generate_dir}, stopping without deploying",
-                    err=True,
-                )
-                return
             app_name = None
             if name:
                 # Check to see if this app already exists
@@ -145,15 +104,7 @@ def publish_subcommand(publish):
                 create_output = check_output(cmd).decode("utf8")
                 app_name = json.loads(create_output)["name"]
 
-            for key, value in environment_variables.items():
-                call(["heroku", "config:set", "-a", app_name, f"{key}={value}"])
-            tar_option = []
-            if tar:
-                tar_option = ["--tar", tar]
-            call(
-                ["heroku", "builds:create", "-a", app_name, "--include-vcs-ignore"]
-                + tar_option
-            )
+            call(["heroku", "builds:create", "-a", app_name, "--include-vcs-ignore"])
 
 
 @contextmanager
@@ -168,7 +119,6 @@ def temporary_heroku_directory(
     static,
     install,
     version_note,
-    secret,
     extra_metadata=None,
 ):
     extra_metadata = extra_metadata or {}
@@ -179,7 +129,7 @@ def temporary_heroku_directory(
     file_names = [os.path.split(f)[-1] for f in files]
 
     if metadata:
-        metadata_content = parse_metadata(metadata.read())
+        metadata_content = json.load(metadata)
     else:
         metadata_content = {}
     for key, value in extra_metadata.items():
@@ -190,24 +140,24 @@ def temporary_heroku_directory(
         os.chdir(tmp.name)
 
         if metadata_content:
-            with open("metadata.json", "w") as fp:
-                fp.write(json.dumps(metadata_content, indent=2))
+            open("metadata.json", "w").write(json.dumps(metadata_content, indent=2))
 
-        with open("runtime.txt", "w") as fp:
-            fp.write("python-3.11.0")
+        open("runtime.txt", "w").write("python-3.6.8")
 
         if branch:
             install = [
-                f"https://github.com/simonw/datasette/archive/{branch}.zip"
+                "https://github.com/simonw/datasette/archive/{branch}.zip".format(
+                    branch=branch
+                )
             ] + list(install)
         else:
            install = ["datasette"] + list(install)
 
-        with open("requirements.txt", "w") as fp:
-            fp.write("\n".join(install))
+        open("requirements.txt", "w").write("\n".join(install))
         os.mkdir("bin")
-        with open("bin/post_compile", "w") as fp:
-            fp.write("datasette inspect --inspect-file inspect-data.json")
+        open("bin/post_compile", "w").write(
+            "datasette inspect --inspect-file inspect-data.json"
+        )
 
         extras = []
         if template_dir:
@@ -231,7 +181,7 @@ def temporary_heroku_directory(
             link_or_copy_directory(
                 os.path.join(saved_cwd, path), os.path.join(tmp.name, mount_point)
             )
-            extras.extend(["--static", f"{mount_point}:{mount_point}"])
+            extras.extend(["--static", "{}:{}".format(mount_point, mount_point)])
 
         quoted_files = " ".join(
             ["-i {}".format(shlex.quote(file_name)) for file_name in file_names]
@@ -239,8 +189,7 @@ def temporary_heroku_directory(
         procfile_cmd = "web: datasette serve --host 0.0.0.0 {quoted_files} --cors --port $PORT --inspect-file inspect-data.json {extras}".format(
             quoted_files=quoted_files, extras=" ".join(extras)
         )
-        with open("Procfile", "w") as fp:
-            fp.write(procfile_cmd)
+        open("Procfile", "w").write(procfile_cmd)
 
         for path, filename in zip(file_paths, file_names):
             link_or_copy(path, os.path.join(tmp.name, filename))
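For reference, temporary_heroku_directory on the `-` side assembles a Heroku build directory from a handful of generated files. A rough sketch of what it writes, with the database name and metadata as invented placeholders:

import json

# Contents mirror the `-` side of the diff above; example.db is a placeholder
generated = {
    "runtime.txt": "python-3.11.0",
    "requirements.txt": "datasette",
    "bin/post_compile": "datasette inspect --inspect-file inspect-data.json",
    "Procfile": (
        "web: datasette serve --host 0.0.0.0 -i example.db --cors "
        "--port $PORT --inspect-file inspect-data.json"
    ),
    "metadata.json": json.dumps({"title": "Example"}, indent=2),
}
for name, content in generated.items():
    print(f"== {name} ==\n{content}\n")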
datasette/publish/now.py (new file, 101 additions)
@@ -0,0 +1,101 @@
+from datasette import hookimpl
+import click
+import json
+from subprocess import run, PIPE
+
+from .common import (
+    add_common_publish_arguments_and_options,
+    fail_if_publish_binary_not_installed,
+)
+from ..utils import temporary_docker_directory
+
+
+@hookimpl
+def publish_subcommand(publish):
+    @publish.command()
+    @add_common_publish_arguments_and_options
+    @click.option(
+        "-n",
+        "--name",
+        default="datasette",
+        help="Application name to use when deploying",
+    )
+    @click.option("--force", is_flag=True, help="Pass --force option to now")
+    @click.option("--token", help="Auth token to use for deploy")
+    @click.option("--alias", multiple=True, help="Desired alias e.g. yoursite.now.sh")
+    @click.option("--spatialite", is_flag=True, help="Enable SpatialLite extension")
+    def nowv1(
+        files,
+        metadata,
+        extra_options,
+        branch,
+        template_dir,
+        plugins_dir,
+        static,
+        install,
+        version_note,
+        title,
+        license,
+        license_url,
+        source,
+        source_url,
+        about,
+        about_url,
+        name,
+        force,
+        token,
+        alias,
+        spatialite,
+    ):
+        fail_if_publish_binary_not_installed("now", "Zeit Now", "https://zeit.co/now")
+        if extra_options:
+            extra_options += " "
+        else:
+            extra_options = ""
+        extra_options += "--config force_https_urls:on"
+
+        with temporary_docker_directory(
+            files,
+            name,
+            metadata,
+            extra_options,
+            branch,
+            template_dir,
+            plugins_dir,
+            static,
+            install,
+            spatialite,
+            version_note,
+            {
+                "title": title,
+                "license": license,
+                "license_url": license_url,
+                "source": source,
+                "source_url": source_url,
+                "about": about,
+                "about_url": about_url,
+            },
+        ):
+            now_json = {"version": 1}
+            open("now.json", "w").write(json.dumps(now_json, indent=4))
+            args = []
+            if force:
+                args.append("--force")
+            if token:
+                args.append("--token={}".format(token))
+            if args:
+                done = run(["now"] + args, stdout=PIPE)
+            else:
+                done = run("now", stdout=PIPE)
+            deployment_url = done.stdout
+            if alias:
+                # I couldn't get --target=production working, so I call
+                # 'now alias' with arguments directly instead - but that
+                # means I need to figure out what URL it was deployed to.
+                for single_alias in alias:  # --alias can be specified multiple times
+                    args = ["now", "alias", deployment_url, single_alias]
+                    if token:
+                        args.append("--token={}".format(token))
+                    run(args)
+            else:
+                print(deployment_url.decode("latin1"))
datasette/renderer.py
@@ -4,9 +4,7 @@ from datasette.utils import (
     remove_infinites,
     CustomJSONEncoder,
     path_from_row_pks,
-    sqlite3,
 )
-from datasette.utils.asgi import Response
 
 
 def convert_specific_columns_to_json(rows, columns, json_cols):
@@ -20,21 +18,21 @@ def convert_specific_columns_to_json(rows, columns, json_cols):
             if column in json_cols:
                 try:
                     value = json.loads(value)
-                except (TypeError, ValueError):
+                except (TypeError, ValueError) as e:
+                    print(e)
                     pass
             new_row.append(value)
         new_rows.append(new_row)
     return new_rows
 
 
-def json_renderer(request, args, data, error, truncated=None):
-    """Render a response as JSON"""
+def json_renderer(args, data, view_name):
+    """ Render a response as JSON """
     status_code = 200
 
     # Handle the _json= parameter which may modify data["rows"]
     json_cols = []
     if "_json" in args:
-        json_cols = args.getlist("_json")
+        json_cols = args["_json"]
     if json_cols and "rows" in data and "columns" in data:
         data["rows"] = convert_specific_columns_to_json(
             data["rows"], data["columns"], json_cols
@@ -45,38 +43,22 @@ def json_renderer(request, args, data, error, truncated=None):
         data["rows"] = [remove_infinites(row) for row in data["rows"]]
 
     # Deal with the _shape option
-    shape = args.get("_shape", "objects")
-    # if there's an error, ignore the shape entirely
-    data["ok"] = True
-    if error:
-        shape = "objects"
-        status_code = 400
-        data["error"] = error
-        data["ok"] = False
-
-    if truncated is not None:
-        data["truncated"] = truncated
+    shape = args.get("_shape", "arrays")
     if shape == "arrayfirst":
-        if not data["rows"]:
-            data = []
-        elif isinstance(data["rows"][0], sqlite3.Row):
-            data = [row[0] for row in data["rows"]]
-        else:
-            assert isinstance(data["rows"][0], dict)
-            data = [next(iter(row.values())) for row in data["rows"]]
+        data = [row[0] for row in data["rows"]]
     elif shape in ("objects", "object", "array"):
         columns = data.get("columns")
         rows = data.get("rows")
-        if rows and columns and not isinstance(rows[0], dict):
+        if rows and columns:
             data["rows"] = [dict(zip(columns, row)) for row in rows]
         if shape == "object":
-            shape_error = None
+            error = None
             if "primary_keys" not in data:
-                shape_error = "_shape=object is only available on tables"
+                error = "_shape=object is only available on tables"
             else:
                 pks = data["primary_keys"]
                 if not pks:
-                    shape_error = (
+                    error = (
                         "_shape=object not available for tables with no primary keys"
                     )
                 else:
@@ -85,41 +67,26 @@ def json_renderer(request, args, data, error, truncated=None):
                     pk_string = path_from_row_pks(row, pks, not pks)
                     object_rows[pk_string] = row
                 data = object_rows
-            if shape_error:
-                data = {"ok": False, "error": shape_error}
+            if error:
+                data = {"ok": False, "error": error}
     elif shape == "array":
         data = data["rows"]
 
     elif shape == "arrays":
-        if not data["rows"]:
-            pass
-        elif isinstance(data["rows"][0], sqlite3.Row):
-            data["rows"] = [list(row) for row in data["rows"]]
-        else:
-            data["rows"] = [list(row.values()) for row in data["rows"]]
+        pass
     else:
         status_code = 400
         data = {
             "ok": False,
-            "error": f"Invalid _shape: {shape}",
+            "error": "Invalid _shape: {}".format(shape),
             "status": 400,
             "title": None,
         }
 
-    # Don't include "columns" in output
-    # https://github.com/simonw/datasette/issues/2136
-    if isinstance(data, dict) and "columns" not in request.args.getlist("_extra"):
-        data.pop("columns", None)
-
     # Handle _nl option for _shape=array
     nl = args.get("_nl", "")
     if nl and shape == "array":
-        body = "\n".join(json.dumps(item, cls=CustomJSONEncoder) for item in data)
+        body = "\n".join(json.dumps(item) for item in data)
         content_type = "text/plain"
     else:
         body = json.dumps(data, cls=CustomJSONEncoder)
         content_type = "application/json; charset=utf-8"
-    headers = {}
-    return Response(
-        body, status=status_code, headers=headers, content_type=content_type
-    )
+    return {"body": body, "status_code": status_code, "content_type": content_type}
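Both sides of json_renderer reshape a rows-and-columns result set according to the _shape argument. A standalone sketch of the three simplest shapes, with invented sample data:

rows = [(1, "one"), (2, "two")]
columns = ["id", "name"]

objects = [dict(zip(columns, row)) for row in rows]  # _shape=objects
arrays = [list(row) for row in rows]                 # _shape=arrays
arrayfirst = [row[0] for row in rows]                # _shape=arrayfirst

assert objects == [{"id": 1, "name": "one"}, {"id": 2, "name": "two"}]
assert arrays == [[1, "one"], [2, "two"]]
assert arrayfirst == [1, 2]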
@@ -1,90 +0,0 @@
-"""Core resource types for Datasette's permission system."""
-
-from datasette.permissions import Resource
-
-
-class DatabaseResource(Resource):
-    """A database in Datasette."""
-
-    name = "database"
-    parent_class = None  # Top of the resource hierarchy
-
-    def __init__(self, database: str):
-        super().__init__(parent=database, child=None)
-
-    @classmethod
-    async def resources_sql(cls, datasette) -> str:
-        return """
-            SELECT database_name AS parent, NULL AS child
-            FROM catalog_databases
-        """
-
-
-class TableResource(Resource):
-    """A table in a database."""
-
-    name = "table"
-    parent_class = DatabaseResource
-
-    def __init__(self, database: str, table: str):
-        super().__init__(parent=database, child=table)
-
-    @classmethod
-    async def resources_sql(cls, datasette) -> str:
-        return """
-            SELECT database_name AS parent, table_name AS child
-            FROM catalog_tables
-            UNION ALL
-            SELECT database_name AS parent, view_name AS child
-            FROM catalog_views
-        """
-
-
-class QueryResource(Resource):
-    """A canned query in a database."""
-
-    name = "query"
-    parent_class = DatabaseResource
-
-    def __init__(self, database: str, query: str):
-        super().__init__(parent=database, child=query)
-
-    @classmethod
-    async def resources_sql(cls, datasette) -> str:
-        from datasette.plugins import pm
-        from datasette.utils import await_me_maybe
-
-        # Get all databases from catalog
-        db = datasette.get_internal_database()
-        result = await db.execute("SELECT database_name FROM catalog_databases")
-        databases = [row[0] for row in result.rows]
-
-        # Gather all canned queries from all databases
-        query_pairs = []
-        for database_name in databases:
-            # Call the hook to get queries (including from config via default plugin)
-            for queries_result in pm.hook.canned_queries(
-                datasette=datasette,
-                database=database_name,
-                actor=None,  # Get ALL queries for resource enumeration
-            ):
-                queries = await await_me_maybe(queries_result)
-                if queries:
-                    for query_name in queries.keys():
-                        query_pairs.append((database_name, query_name))
-
-        # Build SQL
-        if not query_pairs:
-            return "SELECT NULL AS parent, NULL AS child WHERE 0"
-
-        # Generate UNION ALL query
-        selects = []
-        for db_name, query_name in query_pairs:
-            # Escape single quotes by doubling them
-            db_escaped = db_name.replace("'", "''")
-            query_escaped = query_name.replace("'", "''")
-            selects.append(
-                f"SELECT '{db_escaped}' AS parent, '{query_escaped}' AS child"
-            )
-
-        return " UNION ALL ".join(selects)
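QueryResource.resources_sql above inlines database and query names into generated SQL by doubling single quotes. A tiny sketch of that escaping technique, with an invented database/query pair:

def sql_literal(value):
    # Standard SQL escaping: double any embedded single quote
    return "'" + value.replace("'", "''") + "'"

pairs = [("fixtures", "it's a query")]  # invented example pair
sql = " UNION ALL ".join(
    f"SELECT {sql_literal(db)} AS parent, {sql_literal(q)} AS child"
    for db, q in pairs
)
assert sql == "SELECT 'fixtures' AS parent, 'it''s a query' AS child"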
datasette/sql_functions.py
@@ -1,7 +0,0 @@
-from datasette import hookimpl
-from datasette.utils import escape_fts
-
-
-@hookimpl
-def prepare_connection(conn):
-    conn.create_function("escape_fts", 1, escape_fts)
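The deleted prepare_connection hook registers a one-argument Python function on each SQLite connection. The same mechanism shown standalone; the function name "shout" is invented for illustration:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.create_function("shout", 1, lambda s: s.upper())  # name "shout" is made up
print(conn.execute("SELECT shout('hello')").fetchone()[0])  # -> HELLO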
@ -1,481 +1,97 @@
|
||||||
/* Reset and Page Setup ==================================================== */
|
|
||||||
|
|
||||||
/* Reset from http://meyerweb.com/eric/tools/css/reset/
|
|
||||||
v2.0 | 20110126
|
|
||||||
License: none (public domain)
|
|
||||||
*/
|
|
||||||
html, body, div, span, applet, object, iframe,
|
|
||||||
h1, h2, h3, h4, h5, h6, p, blockquote, pre,
|
|
||||||
a, abbr, acronym, address, big, cite, code,
|
|
||||||
del, dfn, em, img, ins, kbd, q, s, samp,
|
|
||||||
small, strike, strong, sub, sup, tt, var,
|
|
||||||
b, u, i, center,
|
|
||||||
dl, dt, dd, ol, ul, li,
|
|
||||||
fieldset, form, label, legend,
|
|
||||||
table, caption, tbody, tfoot, thead, tr, th, td,
|
|
||||||
article, aside, canvas, details, embed,
|
|
||||||
figure, figcaption, footer, header, hgroup,
|
|
||||||
menu, nav, output, ruby, section, summary,
|
|
||||||
time, mark, audio, video {
|
|
||||||
margin: 0;
|
|
||||||
padding: 0;
|
|
||||||
border: 0;
|
|
||||||
font-size: 100%;
|
|
||||||
font: inherit;
|
|
||||||
vertical-align: baseline;
|
|
||||||
}
|
|
||||||
/* HTML5 display-role reset for older browsers */
|
|
||||||
article, aside, details, figcaption, figure,
|
|
||||||
footer, header, hgroup, menu, nav, section {
|
|
||||||
display: block;
|
|
||||||
}
|
|
||||||
body {
|
body {
|
||||||
line-height: 1;
|
margin: 0 1em;
|
||||||
}
|
|
||||||
ol,
|
|
||||||
ul {
|
|
||||||
list-style: none;
|
|
||||||
}
|
|
||||||
blockquote,
|
|
||||||
q {
|
|
||||||
quotes: none;
|
|
||||||
}
|
|
||||||
blockquote:before,
|
|
||||||
blockquote:after,
|
|
||||||
q:before,
|
|
||||||
q:after {
|
|
||||||
content: '';
|
|
||||||
content: none;
|
|
||||||
}
|
|
||||||
table {
|
|
||||||
border-collapse: collapse;
|
|
||||||
border-spacing: 0;
|
|
||||||
}
|
|
||||||
th {
|
|
||||||
padding-right: 1em;
|
|
||||||
white-space: nowrap;
|
|
||||||
}
|
|
||||||
strong {
|
|
||||||
font-weight: bold;
|
|
||||||
}
|
|
||||||
em {
|
|
||||||
font-style: italic;
|
|
||||||
}
|
|
||||||
/* end reset */
|
|
||||||
|
|
||||||
|
|
||||||
body {
|
|
||||||
margin: 0;
|
|
||||||
padding: 0;
|
|
||||||
font-family: "Helvetica Neue", sans-serif;
|
font-family: "Helvetica Neue", sans-serif;
|
||||||
font-size: 1rem;
|
font-size: 1rem;
|
||||||
font-weight: 400;
|
font-weight: 400;
|
||||||
line-height: 1.5;
|
line-height: 1.5;
|
||||||
color: #111A35;
|
color: #212529;
|
||||||
text-align: left;
|
text-align: left;
|
||||||
background-color: #F8FAFB;
|
background-color: #fff;
|
||||||
}
|
}
|
||||||
|
table {
|
||||||
/* Helper Styles ===========================================================*/
|
|
||||||
|
|
||||||
.intro {
|
|
||||||
font-size: 1rem;
|
|
||||||
}
|
|
||||||
.metadata-description {
|
|
||||||
margin-bottom: 1em;
|
|
||||||
}
|
|
||||||
p {
|
|
||||||
margin: 0 0 0.75rem 0;
|
|
||||||
padding: 0;
|
|
||||||
}
|
|
||||||
.meta {
|
|
||||||
color: rgba(0,0,0,0.3);
|
|
||||||
font-size: 0.75rem
|
|
||||||
}
|
|
||||||
.intro {
|
|
||||||
font-size: 1.5rem;
|
|
||||||
margin-bottom: 0.75rem;
|
|
||||||
}
|
|
||||||
.context-text {
|
|
||||||
/* for accessibility and hidden from sight */
|
|
||||||
text-indent: -999em;
|
|
||||||
display: block;
|
|
||||||
width:0;
|
|
||||||
overflow: hidden;
|
|
||||||
margin: 0;
|
|
||||||
padding: 0;
|
|
||||||
line-height: 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
h1,
|
|
||||||
h2,
|
|
||||||
h3,
|
|
||||||
h4,
|
|
||||||
h5,
|
|
||||||
h6,
|
|
||||||
.header1,
|
|
||||||
.header2,
|
|
||||||
.header3,
|
|
||||||
.header4,
|
|
||||||
.header5,
|
|
||||||
.header6 {
|
|
||||||
font-weight: 700;
|
|
||||||
font-size: 1rem;
|
|
||||||
margin: 0;
|
|
||||||
padding: 0;
|
|
||||||
word-break: break-word;
|
|
||||||
}
|
|
||||||
h1,
|
|
||||||
.header1 {
|
|
||||||
font-size: 2rem;
|
|
||||||
margin-bottom: 0.75rem;
|
|
||||||
margin-top: 1rem;
|
|
||||||
}
|
|
||||||
h2,
|
|
||||||
.header2 {
|
|
||||||
font-size: 1.5rem;
|
|
||||||
margin-bottom: 0.75rem;
|
|
||||||
margin-top: 1rem;
|
|
||||||
}
|
|
||||||
h3,
|
|
||||||
.header3 {
|
|
||||||
font-size: 1.25rem;
|
|
||||||
margin: 1rem 0 0.25rem 0;
|
|
||||||
}
|
|
||||||
h4,
|
|
||||||
.header4 {
|
|
||||||
margin: 1rem 0 0.25rem 0;
|
|
||||||
font-weight: 400;
|
|
||||||
text-decoration: underline;
|
|
||||||
}
|
|
||||||
h5,
|
|
||||||
.header5 {
|
|
||||||
margin: 1rem 0 0.25rem 0;
|
|
||||||
font-weight: 700;
|
|
||||||
text-decoration: underline;
|
|
||||||
}
|
|
||||||
h6,
|
|
||||||
.header6 {
|
|
||||||
margin: 1rem 0 0.25rem 0;
|
|
||||||
font-weight: 400;
|
|
||||||
font-style: italic;
|
|
||||||
text-decoration: underline;
|
|
||||||
}
|
|
||||||
|
|
||||||
.page-header {
|
|
||||||
padding-left: 10px;
|
|
||||||
border-left: 10px solid #666;
|
|
||||||
margin-bottom: 0.75rem;
|
|
||||||
margin-top: 1rem;
|
|
||||||
}
|
|
||||||
.page-header h1 {
|
|
||||||
margin: 0;
|
|
||||||
font-size: 2rem;
|
|
||||||
padding-right: 0.2em;
|
|
||||||
}
|
|
||||||
|
|
||||||
.page-action-menu details > summary {
|
|
||||||
list-style: none;
|
|
||||||
cursor: pointer;
|
|
||||||
}
|
|
||||||
.page-action-menu details > summary::-webkit-details-marker {
|
|
||||||
display: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
div,
|
|
||||||
section,
|
|
||||||
article,
|
|
||||||
header,
|
|
||||||
nav,
|
|
||||||
footer,
|
|
||||||
.wrapper {
|
|
||||||
display: block;
|
|
||||||
box-sizing: border-box;
|
|
||||||
}
|
|
||||||
|
|
||||||
a:link {
|
|
||||||
color: #276890;
|
|
||||||
text-decoration: underline;
|
|
||||||
}
|
|
||||||
a:visited {
|
|
||||||
color: #54AC8E;
|
|
||||||
text-decoration: underline;
|
|
||||||
}
|
|
||||||
a:hover,
|
|
||||||
a:focus,
|
|
||||||
a:active {
|
|
||||||
color: #67C98D;
|
|
||||||
text-decoration: underline;
|
|
||||||
}
|
|
||||||
|
|
||||||
button.button-as-link {
|
|
||||||
background: none;
|
|
||||||
border: none;
|
|
||||||
padding: 0;
|
|
||||||
color: #276890;
|
|
||||||
text-decoration: underline;
|
|
||||||
cursor: pointer;
|
|
||||||
font-size: 1rem;
|
|
||||||
}
|
|
||||||
button.button-as-link:hover,
|
|
||||||
button.button-as-link:focus {
|
|
||||||
color: #67C98D;
|
|
||||||
}
|
|
||||||
|
|
||||||
code,
|
|
||||||
pre {
|
|
||||||
font-family: monospace;
|
|
||||||
}
|
|
||||||
|
|
||||||
ul.bullets,
|
|
||||||
ul.tight-bullets,
|
|
||||||
ul.spaced,
|
|
||||||
ol.spaced {
|
|
||||||
margin-bottom: 0.8rem;
|
|
||||||
}
|
|
||||||
ul.bullets,
|
|
||||||
ul.tight-bullets {
|
|
||||||
padding-left: 1.25rem;
|
|
||||||
}
|
|
||||||
ul.bullets li,
|
|
||||||
ul.spaced li,
|
|
||||||
ol.spaced li {
|
|
||||||
margin-bottom: 0.4rem;
|
|
||||||
}
|
|
||||||
ul.bullets li {
|
|
||||||
list-style-type: circle;
|
|
||||||
}
|
|
||||||
ul.tight-bullets li {
|
|
||||||
list-style-type: disc;
|
|
||||||
margin-bottom: 0;
|
|
||||||
word-break: break-all;
|
|
||||||
}
|
|
||||||
a.not-underlined {
|
|
||||||
text-decoration: none;
|
|
||||||
}
|
|
||||||
.not-underlined .underlined {
|
|
||||||
text-decoration: underline;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Page Furniture ========================================================= */
|
|
||||||
/* Header */
|
|
||||||
header.hd,
|
|
||||||
footer.ft {
|
|
||||||
padding: 0.6rem 1rem 0.5rem 1rem;
|
|
||||||
background-color: #276890;
|
|
||||||
background: linear-gradient(180deg, rgba(96,144,173,1) 0%, rgba(39,104,144,1) 50%);
|
|
||||||
color: rgba(255,255,244,0.9);
|
|
||||||
overflow: hidden;
|
|
||||||
box-sizing: border-box;
|
|
||||||
min-height: 2.6rem;
|
|
||||||
}
|
|
||||||
footer.ft {
|
|
||||||
margin-top: 1rem;
|
|
||||||
}
|
|
||||||
header.hd p,
|
|
||||||
footer.ft p {
|
|
||||||
margin: 0;
|
|
||||||
padding: 0;
|
|
||||||
}
|
|
||||||
header.hd .crumbs {
|
|
||||||
float: left;
|
|
||||||
}
|
|
||||||
header.hd .actor {
|
|
||||||
float: right;
|
|
||||||
text-align: right;
|
|
||||||
padding-left: 1rem;
|
|
||||||
padding-right: 1rem;
|
|
||||||
position: relative;
|
|
||||||
top: -3px;
|
|
||||||
}
|
|
||||||
|
|
||||||
footer.ft a:link,
|
|
||||||
footer.ft a:visited,
|
|
||||||
footer.ft a:hover,
|
|
||||||
footer.ft a:focus,
|
|
||||||
footer.ft a:active,
|
|
||||||
footer.ft button.button-as-link {
|
|
||||||
color: rgba(255,255,244,0.8);
|
|
||||||
}
|
|
||||||
header.hd a:link,
|
|
||||||
header.hd a:visited,
|
|
||||||
header.hd a:hover,
|
|
||||||
header.hd a:focus,
|
|
||||||
header.hd a:active,
|
|
||||||
header.hd button.button-as-link {
|
|
||||||
color: rgba(255,255,244,0.8);
|
|
||||||
text-decoration: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
footer.ft a:hover,
|
|
||||||
footer.ft a:focus,
|
|
||||||
footer.ft a:active,
|
|
||||||
footer.ft .button-as-link:hover,
|
|
||||||
footer.ft .button-as-link:focus,
|
|
||||||
header.hd a:hover,
|
|
||||||
header.hd a:focus,
|
|
||||||
header.hd a:active,
|
|
||||||
button.button-as-link:hover,
|
|
||||||
button.button-as-link:focus {
|
|
||||||
color: rgba(255,255,244,1);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
/* Body */
|
|
||||||
section.content {
|
|
||||||
margin: 0 1rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Navigation menu */
|
|
||||||
details.nav-menu > summary {
|
|
||||||
list-style: none;
|
|
||||||
display: inline;
|
|
||||||
float: right;
|
|
||||||
position: relative;
|
|
||||||
cursor: pointer;
|
|
||||||
}
|
|
||||||
details.nav-menu > summary::-webkit-details-marker {
|
|
||||||
display: none;
|
|
||||||
}
|
|
||||||
details .nav-menu-inner {
|
|
||||||
position: absolute;
|
|
||||||
top: 2.6rem;
|
|
||||||
right: 10px;
|
|
||||||
width: 180px;
|
|
||||||
background-color: #276890;
|
|
||||||
z-index: 1000;
|
|
||||||
padding: 0;
|
|
||||||
}
|
|
||||||
.nav-menu-inner li,
|
|
||||||
form.nav-menu-logout {
|
|
||||||
padding: 0.3rem 0.5rem;
|
|
||||||
border-top: 1px solid #ffffff69;
|
|
||||||
}
|
|
||||||
.nav-menu-inner a {
|
|
||||||
display: block;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Table/database actions menu */
|
|
||||||
.page-action-menu {
|
|
||||||
position: relative;
|
|
||||||
margin-bottom: 0.5em;
|
|
||||||
}
|
|
||||||
.actions-menu-links {
|
|
||||||
display: inline;
|
|
||||||
}
|
|
||||||
.actions-menu-links .dropdown-menu {
|
|
||||||
position: absolute;
|
|
||||||
top: calc(100% + 10px);
|
|
||||||
left: 0;
|
|
||||||
z-index: 10000;
|
|
||||||
}
|
|
||||||
.page-action-menu .icon-text {
|
|
||||||
display: inline-flex;
|
|
||||||
align-items: center;
|
|
||||||
border-radius: .25rem;
|
|
||||||
padding: 5px 12px 3px 7px;
|
|
||||||
color: #fff;
|
|
||||||
font-weight: 400;
|
|
||||||
font-size: 0.8em;
|
|
||||||
background: linear-gradient(180deg, #007bff 0%, #4E79C7 100%);
|
|
||||||
border-color: #007bff;
|
|
||||||
}
|
|
||||||
.page-action-menu .icon-text span {
|
|
||||||
/* Nudge text up a bit */
|
|
||||||
position: relative;
|
|
||||||
top: -2px;
|
|
||||||
}
|
|
||||||
.page-action-menu .icon-text:hover {
|
|
||||||
cursor: pointer;
|
|
||||||
}
|
|
||||||
.page-action-menu .icon {
|
|
||||||
width: 18px;
|
|
||||||
height: 18px;
|
|
||||||
margin-right: 4px;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Components ============================================================== */
|
|
||||||
|
|
||||||
|
|
||||||
h2 em {
|
|
||||||
font-style: normal;
|
|
||||||
font-weight: lighter;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Messages */
|
|
||||||
|
|
||||||
.message-info,
|
|
||||||
.message-warning,
|
|
||||||
.message-error {
|
|
||||||
padding: 1rem;
|
|
||||||
margin-bottom: 1rem;
|
|
||||||
background-color: rgba(103,201,141,0.3);
|
|
||||||
}
|
|
||||||
.message-warning {
|
|
||||||
background-color: rgba(245,166,35,0.3);
|
|
||||||
}
|
|
||||||
.message-error {
|
|
||||||
background-color: rgba(208,2,27,0.3);
|
|
||||||
}
|
|
||||||
|
|
||||||
.pattern-heading {
|
|
||||||
padding: 1rem;
|
|
||||||
margin-top: 2rem;
|
|
||||||
border-top: 1px solid rgba(208,2,27,0.8);
|
|
||||||
border-bottom: 1px solid rgba(208,2,27,0.8);
|
|
||||||
background-color: rgba(208,2,27,0.2)
|
|
||||||
}
|
|
||||||
|
|
||||||
/* URL arguments */
|
|
||||||
.extra-wheres ul,
|
|
||||||
.extra-wheres li {
|
|
||||||
list-style-type: none;
|
|
||||||
padding: 0;
|
|
||||||
margin: 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
.wrapped-sql {
|
|
||||||
white-space: pre-wrap;
|
|
||||||
margin: 1rem 0;
|
|
||||||
font-family: monospace;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Tables ================================================================== */
|
|
||||||
.table-wrapper {
|
|
||||||
overflow-x: auto;
|
|
||||||
}
|
|
||||||
table.rows-and-columns {
|
|
||||||
border-collapse: collapse;
|
border-collapse: collapse;
|
||||||
}
|
}
|
||||||
table.rows-and-columns td {
|
td {
|
||||||
border-top: 1px solid #aaa;
|
border-top: 1px solid #aaa;
|
||||||
border-right: 1px solid #eee;
|
border-right: 1px solid #eee;
|
||||||
padding: 4px;
|
padding: 4px;
|
||||||
vertical-align: top;
|
vertical-align: top;
|
||||||
white-space: pre-wrap;
|
|
||||||
}
|
}
|
||||||
table.rows-and-columns td.type-pk {
|
td.col-link {
|
||||||
font-weight: bold;
|
font-weight: bold;
|
||||||
}
|
}
|
||||||
table.rows-and-columns td em {
|
td em {
|
||||||
font-style: normal;
|
font-style: normal;
|
||||||
font-size: 0.8em;
|
font-size: 0.8em;
|
||||||
color: #aaa;
|
color: #aaa;
|
||||||
}
|
}
|
||||||
table.rows-and-columns th {
|
th {
|
||||||
padding-right: 1em;
|
padding-right: 1em;
|
||||||
}
|
}
|
||||||
table.rows-and-columns a:link {
|
table a:link {
|
||||||
|
text-decoration: none;
|
||||||
|
color: #445ac8;
|
||||||
|
}
|
||||||
|
table a:visited {
|
||||||
|
color: #8f54c4;
|
||||||
|
}
|
||||||
|
.small-screen-only,
|
||||||
|
.select-wrapper.small-screen-only {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
@media only screen and (max-width: 576px) {
|
||||||
|
.small-screen-only {
|
||||||
|
display: initial;
|
||||||
|
}
|
||||||
|
/* Force table to not be like tables anymore */
|
||||||
|
table.rows-and-columns,
|
||||||
|
.rows-and-columns thead,
|
||||||
|
.rows-and-columns tbody,
|
||||||
|
.rows-and-columns th,
|
||||||
|
.rows-and-columns td,
|
||||||
|
.rows-and-columns tr {
|
||||||
|
display: block;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Hide table headers (but not display: none;, for accessibility) */
|
||||||
|
.rows-and-columns thead tr {
|
||||||
|
position: absolute;
|
||||||
|
top: -9999px;
|
||||||
|
left: -9999px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.rows-and-columns tr {
|
||||||
|
border: 1px solid #ccc;
|
||||||
|
margin-bottom: 1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.rows-and-columns td {
|
||||||
|
/* Behave like a "row" */
|
||||||
|
border: none;
|
||||||
|
border-bottom: 1px solid #eee;
|
||||||
|
padding: 0;
|
||||||
|
padding-left: 10%;
|
||||||
|
}
|
||||||
|
|
||||||
|
.rows-and-columns td:before {
|
||||||
|
display: block;
|
||||||
|
margin-left: -10%;
|
||||||
|
font-size: 0.8em;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.hd {
|
||||||
|
border-bottom: 2px solid #ccc;
|
||||||
|
}
|
||||||
|
.ft {
|
||||||
|
margin: 1em 0;
|
||||||
|
border-top: 1px solid #ccc;
|
||||||
|
font-size: 0.8em;
|
||||||
|
}
|
||||||
|
.hd :link {
|
||||||
text-decoration: none;
|
text-decoration: none;
|
||||||
}
|
}
|
||||||
.rows-and-columns td ol,
|
|
||||||
.rows-and-columns td ul {
|
|
||||||
list-style: initial;
|
|
||||||
list-style-position: inside;
|
|
||||||
}
|
|
||||||
a.blob-download {
|
|
||||||
display: inline-block;
|
|
||||||
}
|
|
||||||
.db-table p {
|
.db-table p {
|
||||||
margin-top: 0;
|
margin-top: 0;
|
||||||
margin-bottom: 0.3em;
|
margin-bottom: 0.3em;
|
||||||
|
|
@ -485,8 +101,15 @@ a.blob-download {
|
||||||
margin-bottom: 0;
|
margin-bottom: 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Forms =================================================================== */
|
h2 em {
|
||||||
|
font-style: normal;
|
||||||
|
font-weight: lighter;
|
||||||
|
}
|
||||||
|
.extra-wheres ul, .extra-wheres li {
|
||||||
|
list-style-type: none;
|
||||||
|
padding: 0;
|
||||||
|
margin: 0;
|
||||||
|
}
|
||||||
form.sql textarea {
|
form.sql textarea {
|
||||||
border: 1px solid #ccc;
|
border: 1px solid #ccc;
|
||||||
width: 70%;
|
width: 70%;
|
||||||
|
|
@ -495,30 +118,24 @@ form.sql textarea {
|
||||||
font-family: monospace;
|
font-family: monospace;
|
||||||
font-size: 1.3em;
|
font-size: 1.3em;
|
||||||
}
|
}
|
||||||
form.sql label {
|
form label {
|
||||||
|
font-weight: bold;
|
||||||
|
display: inline-block;
|
||||||
width: 15%;
|
width: 15%;
|
||||||
}
|
}
|
||||||
|
.advanced-export form label {
|
||||||
|
width: auto;
|
||||||
|
}
|
||||||
.advanced-export input[type=submit] {
|
.advanced-export input[type=submit] {
|
||||||
font-size: 0.6em;
|
font-size: 0.6em;
|
||||||
margin-left: 1em;
|
margin-left: 1em;
|
||||||
}
|
}
|
||||||
label.sort_by_desc {
|
label.sort_by_desc {
|
||||||
|
width: auto;
|
||||||
padding-right: 1em;
|
padding-right: 1em;
|
||||||
}
|
}
|
||||||
pre#sql-query {
|
form input[type=text],
|
||||||
margin-bottom: 1em;
|
form input[type=search] {
|
||||||
}
|
|
||||||
|
|
||||||
.core label,
|
|
||||||
label.core {
|
|
||||||
font-weight: bold;
|
|
||||||
display: inline-block;
|
|
||||||
}
|
|
||||||
|
|
||||||
.core input[type=text],
|
|
||||||
input.core[type=text],
|
|
||||||
.core input[type=search],
|
|
||||||
input.core[type=search] {
|
|
||||||
border: 1px solid #ccc;
|
border: 1px solid #ccc;
|
||||||
border-radius: 3px;
|
border-radius: 3px;
|
||||||
width: 60%;
|
width: 60%;
|
||||||
|
|
@ -527,54 +144,27 @@ input.core[type=search] {
|
||||||
font-size: 1em;
|
font-size: 1em;
|
||||||
font-family: Helvetica, sans-serif;
|
font-family: Helvetica, sans-serif;
|
||||||
}
|
}
|
||||||
.core input[type=search],
|
@media only screen and (max-width: 576px) {
|
||||||
input.core[type=search] {
|
form.sql textarea {
|
||||||
/* Stop Webkit from styling search boxes in an inconsistent way */
|
width: 95%;
|
||||||
/* https://css-tricks.com/webkit-html5-search-inputs/ comments */
|
}
|
||||||
-webkit-appearance: textfield;
|
|
||||||
}
|
}
|
||||||
.core input[type="search"]::-webkit-search-decoration,
|
form input[type=submit] {
|
||||||
input.core[type="search"]::-webkit-search-decoration,
|
color: #fff;
|
||||||
.core input[type="search"]::-webkit-search-cancel-button,
|
background-color: #007bff;
|
||||||
input.core[type="search"]::-webkit-search-cancel-button,
|
border-color: #007bff;
|
||||||
.core input[type="search"]::-webkit-search-results-button,
|
|
||||||
input.core[type="search"]::-webkit-search-results-button,
|
|
||||||
.core input[type="search"]::-webkit-search-results-decoration,
|
|
||||||
input.core[type="search"]::-webkit-search-results-decoration {
|
|
||||||
display: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
.core input[type=submit],
|
|
||||||
.core button[type=button],
|
|
||||||
input.core[type=submit],
|
|
||||||
button.core[type=button] {
|
|
||||||
font-weight: 400;
|
font-weight: 400;
|
||||||
cursor: pointer;
|
cursor: pointer;
|
||||||
text-align: center;
|
text-align: center;
|
||||||
vertical-align: middle;
|
vertical-align: middle;
|
||||||
border-width: 1px;
|
border: 1px solid blue;
|
||||||
border-style: solid;
|
|
||||||
padding: .5em 0.8em;
|
padding: .5em 0.8em;
|
||||||
font-size: 0.9rem;
|
font-size: 0.9rem;
|
||||||
line-height: 1;
|
line-height: 1;
|
||||||
border-radius: .25rem;
|
border-radius: .25rem;
|
||||||
}
|
|
||||||
|
|
||||||
.core input[type=submit],
|
|
||||||
input.core[type=submit] {
|
|
||||||
color: #fff;
|
|
||||||
background: linear-gradient(180deg, #007bff 0%, #4E79C7 100%);
|
|
||||||
border-color: #007bff;
|
|
||||||
-webkit-appearance: button;
|
-webkit-appearance: button;
|
||||||
}
|
}
|
||||||
|
|
||||||
.core button[type=button],
|
|
||||||
button.core[type=button] {
|
|
||||||
color: #007bff;
|
|
||||||
background-color: #fff;
|
|
||||||
border-color: #007bff;
|
|
||||||
}
|
|
||||||
|
|
||||||
.filter-row {
|
.filter-row {
|
||||||
margin-bottom: 0.6em;
|
margin-bottom: 0.6em;
|
||||||
}
|
}
|
||||||
|
|
@ -599,9 +189,6 @@ button.core[type=button] {
|
||||||
display: inline-block;
|
display: inline-block;
|
||||||
margin-right: 0.3em;
|
margin-right: 0.3em;
|
||||||
}
|
}
|
||||||
.select-wrapper:focus-within {
|
|
||||||
border: 1px solid black;
|
|
||||||
}
|
|
||||||
.select-wrapper.filter-op {
|
.select-wrapper.filter-op {
|
||||||
width: 80px;
|
width: 80px;
|
||||||
}
|
}
|
||||||
|
|
@ -650,9 +237,27 @@ button.core[type=button] {
|
||||||
font-size: 1em;
|
font-size: 1em;
|
||||||
font-family: Helvetica, sans-serif;
|
font-family: Helvetica, sans-serif;
|
||||||
}
|
}
|
||||||
|
@media only screen and (max-width: 576px) {
|
||||||
|
.select-wrapper.small-screen-only {
|
||||||
|
display: inline-block;
|
||||||
|
}
|
||||||
|
.select-wrapper {
|
||||||
|
width: 100px;
|
||||||
|
}
|
||||||
|
.select-wrapper.filter-op {
|
||||||
|
width: 60px;
|
||||||
|
}
|
||||||
|
.filters input.filter-value {
|
||||||
|
width: 140px;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
a.not-underlined {
|
||||||
|
text-decoration: none;
|
||||||
|
}
|
||||||
|
.not-underlined .underlined {
|
||||||
|
text-decoration: underline;
|
||||||
|
}
|
||||||
|
|
||||||
.facet-results {
|
.facet-results {
|
||||||
display: flex;
|
display: flex;
|
||||||
|
|
@ -663,11 +268,6 @@ button.core[type=button] {
|
||||||
width: 250px;
|
width: 250px;
|
||||||
margin-right: 15px;
|
margin-right: 15px;
|
||||||
}
|
}
|
||||||
.facet-info-total {
|
|
||||||
font-size: 0.8em;
|
|
||||||
color: #666;
|
|
||||||
-    padding-right: 0.25em;
-}
.facet-info li,
.facet-info ul {
    margin: 0;

@@ -684,228 +284,15 @@ button.core[type=button] {
.facet-info a.cross:active {
    text-decoration: none;
}
-ul li.facet-truncated {
-    list-style-type: none;
-    position: relative;
-    top: -0.35em;
-    text-indent: 0.85em;
-}
-
.advanced-export {
    margin-top: 1em;
    padding: 0.01em 2em 0.01em 1em;
    width: auto;
    display: inline-block;
    box-shadow: 1px 2px 8px 2px rgba(0,0,0,0.08);
-    background-color: white;
}

.download-sqlite em {
    font-style: normal;
    font-size: 0.8em;
}
-
-
-
-p.zero-results {
-    border: 2px solid #ccc;
-    background-color: #eee;
-    padding: 0.5em;
-    font-style: italic;
-}
-
-/* Value types */
-.type-float, .type-int {
-    color: #666;
-}
-
-
-
-/* Overrides ===============================================================*/
-
-.small-screen-only,
-.select-wrapper.small-screen-only {
-    display: none;
-}
-@media only screen and (max-width: 576px) {
-
-    .small-screen-only {
-        display: initial;
-    }
-    .select-wrapper.small-screen-only {
-        display: inline-block;
-    }
-
-    form.sql textarea {
-        width: 95%;
-    }
-    /* Force table to not be like tables anymore */
-    table.rows-and-columns,
-    .rows-and-columns thead,
-    .rows-and-columns tbody,
-    .rows-and-columns th,
-    .rows-and-columns td,
-    .rows-and-columns tr {
-        display: block;
-    }
-
-    /* Hide table headers (but not display: none;, for accessibility) */
-    .rows-and-columns thead tr {
-        position: absolute;
-        top: -9999px;
-        left: -9999px;
-    }
-
-    table.rows-and-columns tr {
-        border: 1px solid #ccc;
-        margin-bottom: 1em;
-        border-radius: 10px;
-        background-color: white;
-        padding: 0.2rem;
-    }
-
-    table.rows-and-columns td {
-        /* Behave like a "row" */
-        border: none;
-        border-bottom: 1px solid #eee;
-        padding: 0;
-        padding-left: 10%;
-    }
-
-    table.rows-and-columns td:before {
-        display: block;
-        color: black;
-        margin-left: -10%;
-        font-size: 0.8em;
-    }
-
-    .select-wrapper {
-        width: 100px;
-    }
-    .select-wrapper.filter-op {
-        width: 60px;
-    }
-    .filters input.filter-value {
-        width: 140px;
-    }
-}
-
-svg.dropdown-menu-icon {
-    display: inline-block;
-    position: relative;
-    top: 2px;
-    cursor: pointer;
-    opacity: 0.8;
-}
-.dropdown-menu {
-    border: 1px solid #ccc;
-    border-radius: 4px;
-    line-height: 1.4;
-    font-size: 16px;
-    box-shadow: 2px 2px 2px #aaa;
-    background-color: #fff;
-    z-index: 1000;
-}
-.dropdown-menu ul,
-.dropdown-menu li {
-    list-style-type: none;
-    margin: 0;
-    padding: 0;
-}
-.dropdown-menu .dropdown-column-type {
-    font-size: 0.7em;
-    color: #666;
-    margin: 0;
-    padding: 4px 8px 4px 8px;
-}
-.dropdown-menu .dropdown-column-description {
-    margin: 0;
-    color: #666;
-    padding: 4px 8px 4px 8px;
-    max-width: 20em;
-}
-.dropdown-menu li {
-    border-bottom: 1px solid #ccc;
-}
-.dropdown-menu li:last-child {
-    border: none;
-}
-.dropdown-menu a:link,
-.dropdown-menu a:visited,
-.dropdown-menu a:hover,
-.dropdown-menu a:focus,
-.dropdown-menu a:active {
-    text-decoration: none;
-    display: block;
-    padding: 4px 8px 2px 8px;
-    color: #222;
-    white-space: nowrap;
-}
-.dropdown-menu a:hover {
-    background-color: #eee;
-}
-.dropdown-menu .dropdown-description {
-    margin: 0;
-    color: #666;
-    font-size: 0.8em;
-    max-width: 80vw;
-    white-space: normal;
-}
-.dropdown-menu .hook {
-    display: block;
-    position: absolute;
-    top: -5px;
-    left: 6px;
-    width: 0;
-    height: 0;
-    border-left: 5px solid transparent;
-    border-right: 5px solid transparent;
-    border-bottom: 5px solid #666;
-}
-
-.canned-query-edit-sql {
-    padding-left: 0.5em;
-    position: relative;
-    top: 1px;
-}
-
-.blob-download {
-    display: block;
-    white-space: nowrap;
-    padding-right: 20px;
-    position: relative;
-    background-image: url("data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxNiAxNiIgd2lkdGg9IjE2IiBoZWlnaHQ9IjE2Ij48cGF0aCBmaWxsLXJ1bGU9ImV2ZW5vZGQiIGQ9Ik03LjQ3IDEwLjc4YS43NS43NSAwIDAwMS4wNiAwbDMuNzUtMy43NWEuNzUuNzUgMCAwMC0xLjA2LTEuMDZMOC43NSA4LjQ0VjEuNzVhLjc1Ljc1IDAgMDAtMS41IDB2Ni42OUw0Ljc4IDUuOTdhLjc1Ljc1IDAgMDAtMS4wNiAxLjA2bDMuNzUgMy43NXpNMy43NSAxM2EuNzUuNzUgMCAwMDAgMS41aDguNWEuNzUuNzUgMCAwMDAtMS41aC04LjV6Ij48L3BhdGg+PC9zdmc+");
-    background-size: 16px 16px;
-    background-position: right;
-    background-repeat: no-repeat;
-}
-
-dl.column-descriptions dt {
-    font-weight: bold;
-}
-dl.column-descriptions dd {
-    padding-left: 1.5em;
-    white-space: pre-wrap;
-    line-height: 1.1em;
-    color: #666;
-}
-
-.anim-scale-in {
-    animation-name: scale-in;
-    animation-duration: 0.15s;
-    animation-timing-function: cubic-bezier(0.2, 0, 0.13, 1.5);
-}
-
-@keyframes scale-in {
-    0% {
-        opacity: 0;
-        transform: scale(0.6);
-    }
-    100% {
-        opacity: 1;
-        transform: scale(1);
-    }
-}

File diff suppressed because one or more lines are too long
@@ -1,74 +0,0 @@
-import { EditorView, basicSetup } from "codemirror";
-import { keymap } from "@codemirror/view";
-import { sql, SQLDialect } from "@codemirror/lang-sql";
-
-// A variation of SQLite from lang-sql https://github.com/codemirror/lang-sql/blob/ebf115fffdbe07f91465ccbd82868c587f8182bc/src/sql.ts#L231
-const SQLite = SQLDialect.define({
-  // Based on https://www.sqlite.org/lang_keywords.html based on likely keywords to be used in select queries
-  // https://github.com/simonw/datasette/pull/1893#issuecomment-1316401895:
-  keywords:
-    "and as asc between by case cast count current_date current_time current_timestamp desc distinct each else escape except exists explain filter first for from full generated group having if in index inner intersect into isnull join last left like limit not null or order outer over pragma primary query raise range regexp right rollback row select set table then to union unique using values view virtual when where",
-  // https://www.sqlite.org/datatype3.html
-  types: "null integer real text blob",
-  builtin: "",
-  operatorChars: "*+-%<>!=&|/~",
-  identifierQuotes: '`"',
-  specialVar: "@:?$",
-});
-
-// Utility function from https://codemirror.net/docs/migration/
-export function editorFromTextArea(textarea, conf = {}) {
-  // This could also be configured with a set of tables and columns for better autocomplete:
-  // https://github.com/codemirror/lang-sql#user-content-sqlconfig.tables
-  let view = new EditorView({
-    doc: textarea.value,
-    extensions: [
-      keymap.of([
-        {
-          key: "Shift-Enter",
-          run: function () {
-            textarea.value = view.state.doc.toString();
-            textarea.form.submit();
-            return true;
-          },
-        },
-        {
-          key: "Meta-Enter",
-          run: function () {
-            textarea.value = view.state.doc.toString();
-            textarea.form.submit();
-            return true;
-          },
-        },
-      ]),
-      // This has to be after the keymap or else the basicSetup keys will prevent
-      // Meta-Enter from running
-      basicSetup,
-      EditorView.lineWrapping,
-      sql({
-        dialect: SQLite,
-        schema: conf.schema,
-        tables: conf.tables,
-        defaultTableName: conf.defaultTableName,
-        defaultSchemaName: conf.defaultSchemaName,
-      }),
-    ],
-  });
-
-  // Idea taken from https://discuss.codemirror.net/t/resizing-codemirror-6/3265.
-  // Using CSS resize: both and scheduling a measurement when the element changes.
-  let editorDOM = view.contentDOM.closest(".cm-editor");
-  let observer = new ResizeObserver(function () {
-    view.requestMeasure();
-  });
-  observer.observe(editorDOM, { attributes: true });
-
-  textarea.parentNode.insertBefore(view.dom, textarea);
-  textarea.style.display = "none";
-  if (textarea.form) {
-    textarea.form.addEventListener("submit", () => {
-      textarea.value = view.state.doc.toString();
-    });
-  }
-  return view;
-}
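The deleted bundle above exposes editorFromTextArea as its public entry point; the _codemirror_foot.html hunk further down calls it as cm.editorFromTextArea, so the built file is evidently loaded under a cm global. A minimal sketch of wiring it up by hand, assuming that global and a textarea#sql-editor on the page (the schema shape follows the @codemirror/lang-sql convention of table names mapped to column arrays):

// Sketch only: "cm" and the #sql-editor textarea are assumptions taken from
// the templates further down, not part of this file.
const textarea = document.querySelector("textarea#sql-editor");
if (textarea) {
  const view = cm.editorFromTextArea(textarea, {
    // Optional: feeds autocomplete, e.g. {"facetable": ["id", "state", "city"]}
    schema: { facetable: ["id", "state", "city"] },
  });
  // The original textarea stays in the form; its value is synced on submit,
  // and Shift-Enter / Meta-Enter submit the form directly from the editor.
  console.log(view.state.doc.toString());
}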
datasette/static/codemirror-5.31.0-min.css (vendored, new file, 2 lines)
File diff suppressed because one or more lines are too long

datasette/static/codemirror-5.31.0-sql.min.js (vendored, new file, 1 line)
File diff suppressed because one or more lines are too long

datasette/static/codemirror-5.31.0.js (vendored, new file, 9659 lines)
File diff suppressed because it is too large
@@ -1,210 +0,0 @@
-// Custom events for use with the native CustomEvent API
-const DATASETTE_EVENTS = {
-  INIT: "datasette_init", // returns datasette manager instance in evt.detail
-};
-
-// Datasette "core" -> Methods/APIs that are foundational
-// Plugins will have greater stability if they use the functional hooks - but if they do decide to hook into
-// literal DOM selectors, they'll have an easier time using these addresses.
-const DOM_SELECTORS = {
-  /** Should have one match */
-  jsonExportLink: ".export-links a[href*=json]",
-
-  /** Event listeners that go outside of the main table, e.g. existing scroll listener */
-  tableWrapper: ".table-wrapper",
-  table: "table.rows-and-columns",
-  aboveTablePanel: ".above-table-panel",
-
-  // These could have multiple matches
-  /** Used for selecting table headers. Use makeColumnActions if you want to add menu items. */
-  tableHeaders: `table.rows-and-columns th`,
-
-  /** Used to add "where" clauses to query using direct manipulation */
-  filterRows: ".filter-row",
-  /** Used to show top available enum values for a column ("facets") */
-  facetResults: ".facet-results [data-column]",
-};
-
-/**
- * Monolith class for interacting with Datasette JS API
- * Imported with DEFER, runs after main document parsed
- * For now, manually synced with datasette/version.py
- */
-const datasetteManager = {
-  VERSION: window.datasetteVersion,
-
-  // TODO: Should order of registration matter more?
-
-  // Should plugins be allowed to clobber others or is it last-in takes priority?
-  // Does pluginMetadata need to be serializable, or can we let it be stateful / have functions?
-  plugins: new Map(),
-
-  registerPlugin: (name, pluginMetadata) => {
-    if (datasetteManager.plugins.has(name)) {
-      console.warn(`Warning -> plugin ${name} was redefined`);
-    }
-    datasetteManager.plugins.set(name, pluginMetadata);
-
-    // If the plugin participates in the panel... update the panel.
-    if (pluginMetadata.makeAboveTablePanelConfigs) {
-      datasetteManager.renderAboveTablePanel();
-    }
-  },
-
-  /**
-   * New DOM elements are created on each click, so the data is not stale.
-   *
-   * Items
-   * - must provide label (text)
-   * - might provide href (string) or an onclick ((evt) => void)
-   *
-   * columnMeta is metadata stored on the column header (TH) as a DOMStringMap
-   * - column: string
-   * - columnNotNull: boolean
-   * - columnType: sqlite datatype enum (text, number, etc)
-   * - isPk: boolean
-   */
-  makeColumnActions: (columnMeta) => {
-    let columnActions = [];
-
-    // Accept function that returns list of columnActions with keys
-    // Required: label (text)
-    // Optional: onClick or href
-    datasetteManager.plugins.forEach((plugin) => {
-      if (plugin.makeColumnActions) {
-        // Plugins can provide multiple columnActions if they want
-        // If multiple try to create entry with same label, the last one deletes the others
-        columnActions.push(...plugin.makeColumnActions(columnMeta));
-      }
-    });
-
-    // TODO: Validate columnAction configs and give informative error message if missing keys.
-    return columnActions;
-  },
-
-  /**
-   * In MVP, each plugin can only have 1 instance.
-   * In future, panels could be repeated. We omit that for now since so many plugins depend on
-   * shared URL state, so having multiple instances of plugin at same time is problematic.
-   * Currently, we never destroy any panels, we just hide them.
-   *
-   * TODO: nicer panel css, show panel selection state.
-   * TODO: does this hook need to take any arguments?
-   */
-  renderAboveTablePanel: () => {
-    const aboveTablePanel = document.querySelector(
-      DOM_SELECTORS.aboveTablePanel,
-    );
-
-    if (!aboveTablePanel) {
-      console.warn(
-        "This page does not have a table, the renderAboveTablePanel cannot be used.",
-      );
-      return;
-    }
-
-    let aboveTablePanelWrapper = aboveTablePanel.querySelector(".panels");
-
-    // First render: create wrappers. Otherwise, reuse previous.
-    if (!aboveTablePanelWrapper) {
-      aboveTablePanelWrapper = document.createElement("div");
-      aboveTablePanelWrapper.classList.add("tab-contents");
-      const panelNav = document.createElement("div");
-      panelNav.classList.add("tab-controls");
-
-      // Temporary: css for minimal amount of breathing room.
-      panelNav.style.display = "flex";
-      panelNav.style.gap = "8px";
-      panelNav.style.marginTop = "4px";
-      panelNav.style.marginBottom = "20px";
-
-      aboveTablePanel.appendChild(panelNav);
-      aboveTablePanel.appendChild(aboveTablePanelWrapper);
-    }
-
-    datasetteManager.plugins.forEach((plugin, pluginName) => {
-      const { makeAboveTablePanelConfigs } = plugin;
-
-      if (makeAboveTablePanelConfigs) {
-        const controls = aboveTablePanel.querySelector(".tab-controls");
-        const contents = aboveTablePanel.querySelector(".tab-contents");
-
-        // Each plugin can make multiple panels
-        const configs = makeAboveTablePanelConfigs();
-
-        configs.forEach((config, i) => {
-          const nodeContentId = `${pluginName}_${config.id}_panel-content`;
-
-          // quit if we've already registered this plugin
-          // TODO: look into whether plugins should be allowed to ask
-          // parent to re-render, or if they should manage that internally.
-          if (document.getElementById(nodeContentId)) {
-            return;
-          }
-
-          // Add tab control button
-          const pluginControl = document.createElement("button");
-          pluginControl.textContent = config.label;
-          pluginControl.onclick = () => {
-            contents.childNodes.forEach((node) => {
-              if (node.id === nodeContentId) {
-                node.style.display = "block";
-              } else {
-                node.style.display = "none";
-              }
-            });
-          };
-          controls.appendChild(pluginControl);
-
-          // Add plugin content area
-          const pluginNode = document.createElement("div");
-          pluginNode.id = nodeContentId;
-          config.render(pluginNode);
-          pluginNode.style.display = "none"; // Default to hidden unless you're first
-
-          contents.appendChild(pluginNode);
-        });
-
-        // Let first node be selected by default
-        if (contents.childNodes.length) {
-          contents.childNodes[0].style.display = "block";
-        }
-      }
-    });
-  },
-
-  /** Selectors for document (DOM) elements. Store identifier instead of immediate references in case they haven't loaded when Manager starts. */
-  selectors: DOM_SELECTORS,
-
-  // Future API ideas
-  // Fetch page's data in array, and cache so plugins could reuse it
-  // Provide knowledge of what datasette JS or server-side via traditional console autocomplete
-  // State helpers: URL params https://github.com/simonw/datasette/issues/1144 and localstorage
-  // UI Hooks: command + k, tab manager hook
-  // Should we notify plugins that have dependencies
-  // when all dependencies were fulfilled? (leaflet, codemirror, etc)
-  // https://github.com/simonw/datasette-leaflet -> this way
-  // multiple plugins can all request the same copy of leaflet.
-};
-
-const initializeDatasette = () => {
-  // Hide the global behind __ prefix. Ideally they should be listening for the
-  // DATASETTE_EVENTS.INIT event to avoid the habit of reading from the window.
-
-  window.__DATASETTE__ = datasetteManager;
-  console.debug("Datasette Manager Created!");
-
-  const initDatasetteEvent = new CustomEvent(DATASETTE_EVENTS.INIT, {
-    detail: datasetteManager,
-  });
-
-  document.dispatchEvent(initDatasetteEvent);
-};
-
-/**
- * Main function
- * Fires AFTER the document has been parsed
- */
-document.addEventListener("DOMContentLoaded", function () {
-  initializeDatasette();
-});
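For context, a client-side plugin consumes the manager above through the datasette_init event rather than the window global. A minimal hypothetical plugin adding one column-menu item might look like the following; the columnMeta fields match the payload built in table.js further down, while the plugin name and action are purely illustrative:

// Hypothetical plugin sketch; "example-copy-column" is not part of Datasette.
document.addEventListener("datasette_init", (evt) => {
  const manager = evt.detail; // the datasetteManager instance
  manager.registerPlugin("example-copy-column", {
    version: 0.1,
    // Called on each column-menu open with {columnName, columnNotNull, columnType, isPk}
    makeColumnActions: (columnMeta) => [
      {
        label: "Copy column name",
        onClick: () => navigator.clipboard.writeText(columnMeta.columnName),
      },
    ],
  });
});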
Binary file not shown. (Before: 208 B)
@@ -1,56 +0,0 @@
-/*
-https://github.com/luyilin/json-format-highlight
-From https://unpkg.com/json-format-highlight@1.0.1/dist/json-format-highlight.js
-MIT Licensed
-*/
-(function (global, factory) {
-  typeof exports === "object" && typeof module !== "undefined"
-    ? (module.exports = factory())
-    : typeof define === "function" && define.amd
-      ? define(factory)
-      : (global.jsonFormatHighlight = factory());
-})(this, function () {
-  "use strict";
-
-  var defaultColors = {
-    keyColor: "dimgray",
-    numberColor: "lightskyblue",
-    stringColor: "lightcoral",
-    trueColor: "lightseagreen",
-    falseColor: "#f66578",
-    nullColor: "cornflowerblue",
-  };
-
-  function index(json, colorOptions) {
-    if (colorOptions === void 0) colorOptions = {};
-
-    if (!json) {
-      return;
-    }
-    if (typeof json !== "string") {
-      json = JSON.stringify(json, null, 2);
-    }
-    var colors = Object.assign({}, defaultColors, colorOptions);
-    json = json.replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
-    return json.replace(
-      /("(\\u[a-zA-Z0-9]{4}|\\[^u]|[^\\"])*"(\s*:)?|\b(true|false|null)\b|-?\d+(?:\.\d*)?(?:[eE][+]?\d+)?)/g,
-      function (match) {
-        var color = colors.numberColor;
-        if (/^"/.test(match)) {
-          color = /:$/.test(match) ? colors.keyColor : colors.stringColor;
-        } else {
-          color = /true/.test(match)
-            ? colors.trueColor
-            : /false/.test(match)
-              ? colors.falseColor
-              : /null/.test(match)
-                ? colors.nullColor
-                : color;
-        }
-        return '<span style="color: ' + color + '">' + match + "</span>";
-      },
-    );
-  }
-
-  return index;
-});
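Since the UMD wrapper above installs window.jsonFormatHighlight when no module system is present, page code could use it like this; the #output element is a hypothetical target:

// Sketch: colorize a JSON value and inject the resulting <span>-wrapped HTML.
const html = jsonFormatHighlight({ id: 1, name: "Cleo", adopted: true, notes: null });
document.querySelector("#output").innerHTML = html;
// A color override works the same way:
jsonFormatHighlight('{"n": 42}', { numberColor: "tomato" });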
@@ -1,416 +0,0 @@
-class NavigationSearch extends HTMLElement {
-  constructor() {
-    super();
-    this.attachShadow({ mode: "open" });
-    this.selectedIndex = -1;
-    this.matches = [];
-    this.debounceTimer = null;
-
-    this.render();
-    this.setupEventListeners();
-  }
-
-  render() {
-    this.shadowRoot.innerHTML = `
-      <style>
-        :host {
-          display: contents;
-        }
-
-        dialog {
-          border: none;
-          border-radius: 0.75rem;
-          padding: 0;
-          max-width: 90vw;
-          width: 600px;
-          max-height: 80vh;
-          box-shadow: 0 20px 25px -5px rgba(0, 0, 0, 0.1), 0 10px 10px -5px rgba(0, 0, 0, 0.04);
-          animation: slideIn 0.2s ease-out;
-        }
-
-        dialog::backdrop {
-          background: rgba(0, 0, 0, 0.5);
-          backdrop-filter: blur(4px);
-          animation: fadeIn 0.2s ease-out;
-        }
-
-        @keyframes slideIn {
-          from {
-            opacity: 0;
-            transform: translateY(-20px) scale(0.95);
-          }
-          to {
-            opacity: 1;
-            transform: translateY(0) scale(1);
-          }
-        }
-
-        @keyframes fadeIn {
-          from { opacity: 0; }
-          to { opacity: 1; }
-        }
-
-        .search-container {
-          display: flex;
-          flex-direction: column;
-          height: 100%;
-        }
-
-        .search-input-wrapper {
-          padding: 1.25rem;
-          border-bottom: 1px solid #e5e7eb;
-        }
-
-        .search-input {
-          width: 100%;
-          padding: 0.75rem 1rem;
-          font-size: 1rem;
-          border: 2px solid #e5e7eb;
-          border-radius: 0.5rem;
-          outline: none;
-          transition: border-color 0.2s;
-          box-sizing: border-box;
-        }
-
-        .search-input:focus {
-          border-color: #2563eb;
-        }
-
-        .results-container {
-          overflow-y: auto;
-          height: calc(80vh - 180px);
-          padding: 0.5rem;
-        }
-
-        .result-item {
-          padding: 0.875rem 1rem;
-          cursor: pointer;
-          border-radius: 0.5rem;
-          transition: background-color 0.15s;
-          display: flex;
-          align-items: center;
-          gap: 0.75rem;
-        }
-
-        .result-item:hover {
-          background-color: #f3f4f6;
-        }
-
-        .result-item.selected {
-          background-color: #dbeafe;
-        }
-
-        .result-name {
-          font-weight: 500;
-          color: #111827;
-        }
-
-        .result-url {
-          font-size: 0.875rem;
-          color: #6b7280;
-        }
-
-        .no-results {
-          padding: 2rem;
-          text-align: center;
-          color: #6b7280;
-        }
-
-        .hint-text {
-          padding: 0.75rem 1.25rem;
-          font-size: 0.875rem;
-          color: #6b7280;
-          border-top: 1px solid #e5e7eb;
-          display: flex;
-          gap: 1rem;
-          flex-wrap: wrap;
-        }
-
-        .hint-text kbd {
-          background: #f3f4f6;
-          padding: 0.125rem 0.375rem;
-          border-radius: 0.25rem;
-          font-size: 0.75rem;
-          border: 1px solid #d1d5db;
-          font-family: monospace;
-        }
-
-        /* Mobile optimizations */
-        @media (max-width: 640px) {
-          dialog {
-            width: 95vw;
-            max-height: 85vh;
-            border-radius: 0.5rem;
-          }
-
-          .search-input-wrapper {
-            padding: 1rem;
-          }
-
-          .search-input {
-            font-size: 16px; /* Prevents zoom on iOS */
-          }
-
-          .result-item {
-            padding: 1rem 0.75rem;
-          }
-
-          .hint-text {
-            font-size: 0.8rem;
-            padding: 0.5rem 1rem;
-          }
-        }
-      </style>
-
-      <dialog>
-        <div class="search-container">
-          <div class="search-input-wrapper">
-            <input
-              type="text"
-              class="search-input"
-              placeholder="Search..."
-              aria-label="Search navigation"
-              autocomplete="off"
-              spellcheck="false"
-            >
-          </div>
-          <div class="results-container" role="listbox"></div>
-          <div class="hint-text">
-            <span><kbd>↑</kbd> <kbd>↓</kbd> Navigate</span>
-            <span><kbd>Enter</kbd> Select</span>
-            <span><kbd>Esc</kbd> Close</span>
-          </div>
-        </div>
-      </dialog>
-    `;
-  }
-
-  setupEventListeners() {
-    const dialog = this.shadowRoot.querySelector("dialog");
-    const input = this.shadowRoot.querySelector(".search-input");
-    const resultsContainer =
-      this.shadowRoot.querySelector(".results-container");
-
-    // Global keyboard listener for "/"
-    document.addEventListener("keydown", (e) => {
-      if (e.key === "/" && !this.isInputFocused() && !dialog.open) {
-        e.preventDefault();
-        this.openMenu();
-      }
-    });
-
-    // Input event
-    input.addEventListener("input", (e) => {
-      this.handleSearch(e.target.value);
-    });
-
-    // Keyboard navigation
-    input.addEventListener("keydown", (e) => {
-      if (e.key === "ArrowDown") {
-        e.preventDefault();
-        this.moveSelection(1);
-      } else if (e.key === "ArrowUp") {
-        e.preventDefault();
-        this.moveSelection(-1);
-      } else if (e.key === "Enter") {
-        e.preventDefault();
-        this.selectCurrentItem();
-      } else if (e.key === "Escape") {
-        this.closeMenu();
-      }
-    });
-
-    // Click on result item
-    resultsContainer.addEventListener("click", (e) => {
-      const item = e.target.closest(".result-item");
-      if (item) {
-        const index = parseInt(item.dataset.index);
-        this.selectItem(index);
-      }
-    });
-
-    // Close on backdrop click
-    dialog.addEventListener("click", (e) => {
-      if (e.target === dialog) {
-        this.closeMenu();
-      }
-    });
-
-    // Initial load
-    this.loadInitialData();
-  }
-
-  isInputFocused() {
-    const activeElement = document.activeElement;
-    return (
-      activeElement &&
-      (activeElement.tagName === "INPUT" ||
-        activeElement.tagName === "TEXTAREA" ||
-        activeElement.isContentEditable)
-    );
-  }
-
-  loadInitialData() {
-    const itemsAttr = this.getAttribute("items");
-    if (itemsAttr) {
-      try {
-        this.allItems = JSON.parse(itemsAttr);
-        this.matches = this.allItems;
-      } catch (e) {
-        console.error("Failed to parse items attribute:", e);
-        this.allItems = [];
-        this.matches = [];
-      }
-    }
-  }
-
-  handleSearch(query) {
-    clearTimeout(this.debounceTimer);
-
-    this.debounceTimer = setTimeout(() => {
-      const url = this.getAttribute("url");
-
-      if (url) {
-        // Fetch from API
-        this.fetchResults(url, query);
-      } else {
-        // Filter local items
-        this.filterLocalItems(query);
-      }
-    }, 200);
-  }
-
-  async fetchResults(url, query) {
-    try {
-      const searchUrl = `${url}?q=${encodeURIComponent(query)}`;
-      const response = await fetch(searchUrl);
-      const data = await response.json();
-      this.matches = data.matches || [];
-      this.selectedIndex = this.matches.length > 0 ? 0 : -1;
-      this.renderResults();
-    } catch (e) {
-      console.error("Failed to fetch search results:", e);
-      this.matches = [];
-      this.renderResults();
-    }
-  }
-
-  filterLocalItems(query) {
-    if (!query.trim()) {
-      this.matches = [];
-    } else {
-      const lowerQuery = query.toLowerCase();
-      this.matches = (this.allItems || []).filter(
-        (item) =>
-          item.name.toLowerCase().includes(lowerQuery) ||
-          item.url.toLowerCase().includes(lowerQuery),
-      );
-    }
-    this.selectedIndex = this.matches.length > 0 ? 0 : -1;
-    this.renderResults();
-  }
-
-  renderResults() {
-    const container = this.shadowRoot.querySelector(".results-container");
-    const input = this.shadowRoot.querySelector(".search-input");
-
-    if (this.matches.length === 0) {
-      const message = input.value.trim()
-        ? "No results found"
-        : "Start typing to search...";
-      container.innerHTML = `<div class="no-results">${message}</div>`;
-      return;
-    }
-
-    container.innerHTML = this.matches
-      .map(
-        (match, index) => `
-          <div
-            class="result-item ${
-              index === this.selectedIndex ? "selected" : ""
-            }"
-            data-index="${index}"
-            role="option"
-            aria-selected="${index === this.selectedIndex}"
-          >
-            <div>
-              <div class="result-name">${this.escapeHtml(
-                match.name,
-              )}</div>
-              <div class="result-url">${this.escapeHtml(match.url)}</div>
-            </div>
-          </div>
-        `,
-      )
-      .join("");
-
-    // Scroll selected item into view
-    if (this.selectedIndex >= 0) {
-      const selectedItem = container.children[this.selectedIndex];
-      if (selectedItem) {
-        selectedItem.scrollIntoView({ block: "nearest" });
-      }
-    }
-  }
-
-  moveSelection(direction) {
-    const newIndex = this.selectedIndex + direction;
-    if (newIndex >= 0 && newIndex < this.matches.length) {
-      this.selectedIndex = newIndex;
-      this.renderResults();
-    }
-  }
-
-  selectCurrentItem() {
-    if (this.selectedIndex >= 0 && this.selectedIndex < this.matches.length) {
-      this.selectItem(this.selectedIndex);
-    }
-  }
-
-  selectItem(index) {
-    const match = this.matches[index];
-    if (match) {
-      // Dispatch custom event
-      this.dispatchEvent(
-        new CustomEvent("select", {
-          detail: match,
-          bubbles: true,
-          composed: true,
-        }),
-      );
-
-      // Navigate to URL
-      window.location.href = match.url;
-
-      this.closeMenu();
-    }
-  }
-
-  openMenu() {
-    const dialog = this.shadowRoot.querySelector("dialog");
-    const input = this.shadowRoot.querySelector(".search-input");
-
-    dialog.showModal();
-    input.value = "";
-    input.focus();
-
-    // Reset state - start with no items shown
-    this.matches = [];
-    this.selectedIndex = -1;
-    this.renderResults();
-  }
-
-  closeMenu() {
-    const dialog = this.shadowRoot.querySelector("dialog");
-    dialog.close();
-  }
-
-  escapeHtml(text) {
-    const div = document.createElement("div");
-    div.textContent = text;
-    return div.innerHTML;
-  }
-}
-
-// Register the custom element
-customElements.define("navigation-search", NavigationSearch);
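A usage sketch for the component above, with a static item list (a url attribute would switch it to fetching url?q=... instead). Markup insertion is used because the constructor reads the items attribute immediately, before setAttribute could run on a node made with createElement; the item list itself is illustrative:

// Sketch: drop the component into the page with some example items.
const items = [
  { name: "fixtures", url: "/fixtures" },
  { name: "facetable", url: "/fixtures/facetable" },
];
document.body.insertAdjacentHTML(
  "beforeend",
  `<navigation-search items='${JSON.stringify(items)}'></navigation-search>`,
);
// Pressing "/" outside an input now opens the dialog; typing filters items
// by name or URL, and Enter navigates to the selected match.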
datasette/static/sql-formatter-2.3.3.min.js (vendored, 5 lines)
File diff suppressed because one or more lines are too long
@@ -1,343 +0,0 @@
-var DROPDOWN_HTML = `<div class="dropdown-menu">
-  <div class="hook"></div>
-  <ul>
-    <li><a class="dropdown-sort-asc" href="#">Sort ascending</a></li>
-    <li><a class="dropdown-sort-desc" href="#">Sort descending</a></li>
-    <li><a class="dropdown-facet" href="#">Facet by this</a></li>
-    <li><a class="dropdown-hide-column" href="#">Hide this column</a></li>
-    <li><a class="dropdown-show-all-columns" href="#">Show all columns</a></li>
-    <li><a class="dropdown-not-blank" href="#">Show not-blank rows</a></li>
-  </ul>
-  <p class="dropdown-column-type"></p>
-  <p class="dropdown-column-description"></p>
-</div>`;
-
-var DROPDOWN_ICON_SVG = `<svg xmlns="http://www.w3.org/2000/svg" width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
-  <circle cx="12" cy="12" r="3"></circle>
-  <path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z"></path>
-</svg>`;
-
-/** Main initialization function for Datasette Table interactions */
-const initDatasetteTable = function (manager) {
-  // Feature detection
-  if (!window.URLSearchParams) {
-    return;
-  }
-  function getParams() {
-    return new URLSearchParams(location.search);
-  }
-  function paramsToUrl(params) {
-    var s = params.toString();
-    return s ? "?" + s : location.pathname;
-  }
-  function sortDescUrl(column) {
-    var params = getParams();
-    params.set("_sort_desc", column);
-    params.delete("_sort");
-    params.delete("_next");
-    return paramsToUrl(params);
-  }
-  function sortAscUrl(column) {
-    var params = getParams();
-    params.set("_sort", column);
-    params.delete("_sort_desc");
-    params.delete("_next");
-    return paramsToUrl(params);
-  }
-  function facetUrl(column) {
-    var params = getParams();
-    params.append("_facet", column);
-    return paramsToUrl(params);
-  }
-  function hideColumnUrl(column) {
-    var params = getParams();
-    params.append("_nocol", column);
-    return paramsToUrl(params);
-  }
-  function showAllColumnsUrl() {
-    var params = getParams();
-    params.delete("_nocol");
-    params.delete("_col");
-    return paramsToUrl(params);
-  }
-  function notBlankUrl(column) {
-    var params = getParams();
-    params.set(`${column}__notblank`, "1");
-    return paramsToUrl(params);
-  }
-  function closeMenu() {
-    menu.style.display = "none";
-    menu.classList.remove("anim-scale-in");
-  }
-
-  const tableWrapper = document.querySelector(manager.selectors.tableWrapper);
-  if (tableWrapper) {
-    tableWrapper.addEventListener("scroll", closeMenu);
-  }
-  document.body.addEventListener("click", (ev) => {
-    /* was this click outside the menu? */
-    var target = ev.target;
-    while (target && target != menu) {
-      target = target.parentNode;
-    }
-    if (!target) {
-      closeMenu();
-    }
-  });
-
-  function onTableHeaderClick(ev) {
-    ev.preventDefault();
-    ev.stopPropagation();
-    menu.innerHTML = DROPDOWN_HTML;
-    var th = ev.target;
-    while (th.nodeName != "TH") {
-      th = th.parentNode;
-    }
-    var rect = th.getBoundingClientRect();
-    var menuTop = rect.bottom + window.scrollY;
-    var menuLeft = rect.left + window.scrollX;
-    var column = th.getAttribute("data-column");
-    var params = getParams();
-    var sort = menu.querySelector("a.dropdown-sort-asc");
-    var sortDesc = menu.querySelector("a.dropdown-sort-desc");
-    var facetItem = menu.querySelector("a.dropdown-facet");
-    var notBlank = menu.querySelector("a.dropdown-not-blank");
-    var hideColumn = menu.querySelector("a.dropdown-hide-column");
-    var showAllColumns = menu.querySelector("a.dropdown-show-all-columns");
-    if (params.get("_sort") == column) {
-      sort.parentNode.style.display = "none";
-    } else {
-      sort.parentNode.style.display = "block";
-      sort.setAttribute("href", sortAscUrl(column));
-    }
-    if (params.get("_sort_desc") == column) {
-      sortDesc.parentNode.style.display = "none";
-    } else {
-      sortDesc.parentNode.style.display = "block";
-      sortDesc.setAttribute("href", sortDescUrl(column));
-    }
-    /* Show hide columns options */
-    if (params.get("_nocol") || params.get("_col")) {
-      showAllColumns.parentNode.style.display = "block";
-      showAllColumns.setAttribute("href", showAllColumnsUrl());
-    } else {
-      showAllColumns.parentNode.style.display = "none";
-    }
-    if (th.getAttribute("data-is-pk") != "1") {
-      hideColumn.parentNode.style.display = "block";
-      hideColumn.setAttribute("href", hideColumnUrl(column));
-    } else {
-      hideColumn.parentNode.style.display = "none";
-    }
-    /* Only show "Facet by this" if it's not the first column, not selected,
-       not a single PK and the Datasette allow_facet setting is True */
-    var displayedFacets = Array.from(
-      document.querySelectorAll(".facet-info"),
-    ).map((el) => el.dataset.column);
-    var isFirstColumn =
-      th.parentElement.querySelector("th:first-of-type") == th;
-    var isSinglePk =
-      th.getAttribute("data-is-pk") == "1" &&
-      document.querySelectorAll('th[data-is-pk="1"]').length == 1;
-    if (
-      !DATASETTE_ALLOW_FACET ||
-      isFirstColumn ||
-      displayedFacets.includes(column) ||
-      isSinglePk
-    ) {
-      facetItem.parentNode.style.display = "none";
-    } else {
-      facetItem.parentNode.style.display = "block";
-      facetItem.setAttribute("href", facetUrl(column));
-    }
-    /* Show notBlank option if not selected AND at least one visible blank value */
-    var tdsForThisColumn = Array.from(
-      th.closest("table").querySelectorAll("td." + th.className),
-    );
-    if (
-      params.get(`${column}__notblank`) != "1" &&
-      tdsForThisColumn.filter((el) => el.innerText.trim() == "").length
-    ) {
-      notBlank.parentNode.style.display = "block";
-      notBlank.setAttribute("href", notBlankUrl(column));
-    } else {
-      notBlank.parentNode.style.display = "none";
-    }
-    var columnTypeP = menu.querySelector(".dropdown-column-type");
-    var columnType = th.dataset.columnType;
-    var notNull = th.dataset.columnNotNull == 1 ? " NOT NULL" : "";
-
-    if (columnType) {
-      columnTypeP.style.display = "block";
-      columnTypeP.innerText = `Type: ${columnType.toUpperCase()}${notNull}`;
-    } else {
-      columnTypeP.style.display = "none";
-    }
-
-    var columnDescriptionP = menu.querySelector(".dropdown-column-description");
-    if (th.dataset.columnDescription) {
-      columnDescriptionP.innerText = th.dataset.columnDescription;
-      columnDescriptionP.style.display = "block";
-    } else {
-      columnDescriptionP.style.display = "none";
-    }
-    menu.style.position = "absolute";
-    menu.style.top = menuTop + 6 + "px";
-    menu.style.left = menuLeft + "px";
-    menu.style.display = "block";
-    menu.classList.add("anim-scale-in");
-
-    // Custom menu items on each render
-    // Plugin hook: allow adding JS-based additional menu items
-    const columnActionsPayload = {
-      columnName: th.dataset.column,
-      columnNotNull: th.dataset.columnNotNull === "1",
-      columnType: th.dataset.columnType,
-      isPk: th.dataset.isPk === "1",
-    };
-    const columnItemConfigs = manager.makeColumnActions(columnActionsPayload);
-
-    const menuList = menu.querySelector("ul");
-    columnItemConfigs.forEach((itemConfig) => {
-      // Remove items from previous render. We assume entries have unique labels.
-      const existingItems = menuList.querySelectorAll(`li`);
-      Array.from(existingItems)
-        .filter((item) => item.innerText === itemConfig.label)
-        .forEach((node) => {
-          node.remove();
-        });
-
-      const newLink = document.createElement("a");
-      newLink.textContent = itemConfig.label;
-      newLink.href = itemConfig.href ?? "#";
-      if (itemConfig.onClick) {
-        newLink.onclick = itemConfig.onClick;
-      }
-
-      // Attach new elements to DOM
-      const menuItem = document.createElement("li");
-      menuItem.appendChild(newLink);
-      menuList.appendChild(menuItem);
-    });
-
-    // Measure width of menu and adjust position if too far right
-    const menuWidth = menu.offsetWidth;
-    const windowWidth = window.innerWidth;
-    if (menuLeft + menuWidth > windowWidth) {
-      menu.style.left = windowWidth - menuWidth - 20 + "px";
-    }
-    // Align menu .hook arrow with the column cog icon
-    const hook = menu.querySelector(".hook");
-    const icon = th.querySelector(".dropdown-menu-icon");
-    const iconRect = icon.getBoundingClientRect();
-    const hookLeft = iconRect.left - menuLeft + 1 + "px";
-    hook.style.left = hookLeft;
-    // Move the whole menu right if the hook is too far right
-    const menuRect = menu.getBoundingClientRect();
-    if (iconRect.right > menuRect.right) {
-      menu.style.left = iconRect.right - menuWidth + "px";
-      // And move hook tip as well
-      hook.style.left = menuWidth - 13 + "px";
-    }
-  }
-
-  var svg = document.createElement("div");
-  svg.innerHTML = DROPDOWN_ICON_SVG;
-  svg = svg.querySelector("*");
-  svg.classList.add("dropdown-menu-icon");
-  var menu = document.createElement("div");
-  menu.innerHTML = DROPDOWN_HTML;
-  menu = menu.querySelector("*");
-  menu.style.position = "absolute";
-  menu.style.display = "none";
-  document.body.appendChild(menu);
-
-  var ths = Array.from(
-    document.querySelectorAll(manager.selectors.tableHeaders),
-  );
-  ths.forEach((th) => {
-    if (!th.querySelector("a")) {
-      return;
-    }
-    var icon = svg.cloneNode(true);
-    icon.addEventListener("click", onTableHeaderClick);
-    th.appendChild(icon);
-  });
-};
-
-/* Add x buttons to the filter rows */
-function addButtonsToFilterRows(manager) {
-  var x = "✖";
-  var rows = Array.from(
-    document.querySelectorAll(manager.selectors.filterRow),
-  ).filter((el) => el.querySelector(".filter-op"));
-  rows.forEach((row) => {
-    var a = document.createElement("a");
-    a.setAttribute("href", "#");
-    a.setAttribute("aria-label", "Remove this filter");
-    a.style.textDecoration = "none";
-    a.innerText = x;
-    a.addEventListener("click", (ev) => {
-      ev.preventDefault();
-      let row = ev.target.closest("div");
-      row.querySelector("select").value = "";
-      row.querySelector(".filter-op select").value = "exact";
-      row.querySelector("input.filter-value").value = "";
-      ev.target.closest("a").style.display = "none";
-    });
-    row.appendChild(a);
-    var column = row.querySelector("select");
-    if (!column.value) {
-      a.style.display = "none";
-    }
-  });
-}
-
-/* Set up datalist autocomplete for filter values */
-function initAutocompleteForFilterValues(manager) {
-  function createDataLists() {
-    var facetResults = document.querySelectorAll(
-      manager.selectors.facetResults,
-    );
-    Array.from(facetResults).forEach(function (facetResult) {
-      // Use link text from all links in the facet result
-      var links = Array.from(
-        facetResult.querySelectorAll("li:not(.facet-truncated) a"),
-      );
-      // Create a datalist element
-      var datalist = document.createElement("datalist");
-      datalist.id = "datalist-" + facetResult.dataset.column;
-      // Create an option element for each link text
-      links.forEach(function (link) {
-        var option = document.createElement("option");
-        option.label = link.innerText;
-        option.value = link.dataset.facetValue;
-        datalist.appendChild(option);
-      });
-      // Add the datalist to the facet result
-      facetResult.appendChild(datalist);
-    });
-  }
-  createDataLists();
-  // When any select with name=_filter_column changes, update the datalist
-  document.body.addEventListener("change", function (event) {
-    if (event.target.name === "_filter_column") {
-      event.target
-        .closest(manager.selectors.filterRow)
-        .querySelector(".filter-value")
-        .setAttribute("list", "datalist-" + event.target.value);
-    }
-  });
-}
-
-// Ensures Table UI is initialized only after the Manager is ready.
-document.addEventListener("datasette_init", function (evt) {
-  const { detail: manager } = evt;
-
-  // Main table
-  initDatasetteTable(manager);
-
-  // Other UI functions with interactive JS needs
-  addButtonsToFilterRows(manager);
-  initAutocompleteForFilterValues(manager);
-});
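The URL helpers at the top of this file all follow the same pattern: copy the current query string, adjust one parameter, reset pagination. Mirroring the sortAscUrl logic outside the page context shows the effect:

// Standalone illustration of the sortAscUrl logic above.
const params = new URLSearchParams("_sort_desc=name&_next=100");
params.set("_sort", "age");   // choose the new sort column
params.delete("_sort_desc");  // clear the opposite direction
params.delete("_next");       // drop the pagination cursor
console.log("?" + params.toString()); // "?_sort=age"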
@@ -3,6 +3,7 @@
{% block title %}{% if title %}{{ title }}{% else %}Error {{ status }}{% endif %}{% endblock %}

{% block content %}
+<div class="hd"><a href="/">home</a></div>

<h1>{% if title %}{{ title }}{% else %}Error {{ status }}{% endif %}</h1>
@@ -1,28 +0,0 @@
-{% if action_links %}
-<div class="page-action-menu">
-  <details class="actions-menu-links details-menu">
-    <summary>
-      <div class="icon-text">
-        <svg class="icon" aria-labelledby="actions-menu-links-title" role="img" style="color: #fff" xmlns="http://www.w3.org/2000/svg" width="28" height="28" viewBox="0 0 28 28" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
-          <title id="actions-menu-links-title">{{ action_title }}</title>
-          <circle cx="12" cy="12" r="3"></circle>
-          <path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z"></path>
-        </svg>
-        <span>{{ action_title }}</span>
-      </div>
-    </summary>
-    <div class="dropdown-menu">
-      <div class="hook"></div>
-      <ul>
-        {% for link in action_links %}
-        <li><a href="{{ link.href }}">{{ link.label }}
-          {% if link.description %}
-          <p class="dropdown-description">{{ link.description }}</p>
-          {% endif %}</a>
-        </li>
-        {% endfor %}
-      </ul>
-    </div>
-  </details>
-</div>
-{% endif %}
@@ -1,16 +0,0 @@
-<script>
-document.body.addEventListener('click', (ev) => {
-  /* Close any open details elements that this click is outside of */
-  var target = ev.target;
-  var detailsClickedWithin = null;
-  while (target && target.tagName != 'DETAILS') {
-    target = target.parentNode;
-  }
-  if (target && target.tagName == 'DETAILS') {
-    detailsClickedWithin = target;
-  }
-  Array.from(document.querySelectorAll('details.details-menu')).filter(
-    (details) => details.open && details != detailsClickedWithin
-  ).forEach(details => details.open = false);
-});
-</script>
@@ -1,16 +1,7 @@
-<script src="{{ base_url }}-/static/sql-formatter-2.3.3.min.js" defer></script>
-<script src="{{ base_url }}-/static/cm-editor-6.0.1.bundle.js"></script>
+<script src="/-/static/codemirror-5.31.0.js"></script>
+<link rel="stylesheet" href="/-/static/codemirror-5.31.0-min.css" />
+<script src="/-/static/codemirror-5.31.0-sql.min.js"></script>
<style>
-.cm-editor {
-    resize: both;
-    overflow: hidden;
-    width: 80%;
-    border: 1px solid #ddd;
-}
-/* Fix autocomplete icon positioning. The icon element gets border-box sizing set due to
-the global reset, but this causes overlapping icon and text. Markup:
-`<div class="cm-completionIcon cm-completionIcon-keyword" aria-hidden="true"></div>` */
-.cm-completionIcon {
-    box-sizing: content-box;
-}
+.CodeMirror { height: auto; min-height: 70px; width: 80%; border: 1px solid #ddd; }
+.CodeMirror-scroll { max-height: 200px; }
</style>
@@ -1,42 +1,13 @@
<script>
-{% if table_columns %}
-const schema = {{ table_columns|tojson(2) }};
-{% else %}
-const schema = {};
-{% endif %}
-
-window.addEventListener("DOMContentLoaded", () => {
-  const sqlFormat = document.querySelector("button#sql-format");
-  const readOnly = document.querySelector("pre#sql-query");
-  const sqlInput = document.querySelector("textarea#sql-editor");
-  if (sqlFormat && !readOnly) {
-    sqlFormat.hidden = false;
-  }
-  if (sqlInput) {
-    var editor = (window.editor = cm.editorFromTextArea(sqlInput, {
-      schema,
-    }));
-    if (sqlFormat) {
-      sqlFormat.addEventListener("click", (ev) => {
-        const formatted = sqlFormatter.format(editor.state.doc.toString());
-        editor.dispatch({
-          changes: {
-            from: 0,
-            to: editor.state.doc.length,
-            insert: formatted,
-          },
-        });
-      });
-    }
-  }
-  if (sqlFormat && readOnly) {
-    const formatted = sqlFormatter.format(readOnly.innerHTML);
-    if (formatted != readOnly.innerHTML) {
-      sqlFormat.hidden = false;
-      sqlFormat.addEventListener("click", (ev) => {
-        readOnly.innerHTML = formatted;
-      });
-    }
-  }
-});
+var editor = CodeMirror.fromTextArea(document.getElementById("sql-editor"), {
+  lineNumbers: true,
+  mode: "text/x-sql",
+  lineWrapping: true,
+});
+editor.setOption("extraKeys", {
+  "Shift-Enter": function() {
+    document.getElementsByClassName("sql")[0].submit();
+  },
+  Tab: false
+});
</script>
@@ -1,15 +0,0 @@
-{% macro nav(request, database=None, table=None) -%}
-  {% if crumb_items is defined %}
-    {% set items=crumb_items(request=request, database=database, table=table) %}
-    {% if items %}
-      <p class="crumbs">
-        {% for item in items %}
-          <a href="{{ item.href }}">{{ item.label }}</a>
-          {% if not loop.last %}
-            /
-          {% endif %}
-        {% endfor %}
-      </p>
-    {% endif %}
-  {% endif %}
-{%- endmacro %}
@@ -1,50 +0,0 @@
-<script>
-// Common utility functions for debug pages
-
-// Populate form from URL parameters on page load
-function populateFormFromURL() {
-  const params = new URLSearchParams(window.location.search);
-
-  const action = params.get('action');
-  if (action) {
-    const actionField = document.getElementById('action');
-    if (actionField) {
-      actionField.value = action;
-    }
-  }
-
-  const parent = params.get('parent');
-  if (parent) {
-    const parentField = document.getElementById('parent');
-    if (parentField) {
-      parentField.value = parent;
-    }
-  }
-
-  const child = params.get('child');
-  if (child) {
-    const childField = document.getElementById('child');
-    if (childField) {
-      childField.value = child;
-    }
-  }
-
-  const pageSize = params.get('page_size');
-  if (pageSize) {
-    const pageSizeField = document.getElementById('page_size');
-    if (pageSizeField) {
-      pageSizeField.value = pageSize;
-    }
-  }
-
-  return params;
-}
-
-// HTML escape function
-function escapeHtml(text) {
-  if (text === null || text === undefined) return '';
-  const div = document.createElement('div');
-  div.textContent = text;
-  return div.innerHTML;
-}
-</script>
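A debug page including the snippet above would typically call populateFormFromURL once the DOM is ready; a minimal hypothetical caller:

// Sketch: auto-fill the form, then optionally react to a pre-set action.
document.addEventListener("DOMContentLoaded", () => {
  const params = populateFormFromURL();
  if (params.get("action")) {
    // e.g. submit automatically, or highlight the pre-filled fields
    console.log("Pre-filled action:", escapeHtml(params.get("action")));
  }
});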
@@ -1,6 +1,6 @@
-{% if metadata.get("description_html") or metadata.get("description") %}
+{% if metadata.description_html or metadata.description %}
<div class="metadata-description">
-    {% if metadata.get("description_html") %}
+    {% if metadata.description_html %}
    {{ metadata.description_html|safe }}
    {% else %}
    {{ metadata.description }}
@@ -21,7 +21,7 @@
    <a href="{{ metadata.source_url }}">
    {% endif %}{{ metadata.source or metadata.source_url }}{% if metadata.source_url %}</a>{% endif %}
{% endif %}
-{% if metadata.about or metadata.about_url %}{% if metadata.license or metadata.license_url or metadata.source or metadata.source_url %}·{% endif %}
+{% if metadata.about or metadata.about_url %}{% if metadata.license or metadata.license_url or metadata.source or metadat.source_url %}·{% endif %}
About: {% if metadata.about_url %}
    <a href="{{ metadata.about_url }}">
    {% endif %}{{ metadata.about or metadata.about_url }}{% if metadata.about_url %}</a>{% endif %}
@@ -1,28 +0,0 @@
-<div class="facet-results">
-  {% for facet_info in sorted_facet_results %}
-    <div class="facet-info facet-{{ database|to_css_class }}-{{ table|to_css_class }}-{{ facet_info.name|to_css_class }}" id="facet-{{ facet_info.name|to_css_class }}" data-column="{{ facet_info.name }}">
-      <p class="facet-info-name">
-        <strong>{{ facet_info.name }}{% if facet_info.type != "column" %} ({{ facet_info.type }}){% endif %}
-          <span class="facet-info-total">{% if facet_info.truncated %}>{% endif %}{{ facet_info.results|length }}</span>
-        </strong>
-        {% if facet_info.hideable %}
-          <a href="{{ facet_info.toggle_url }}" class="cross">✖</a>
-        {% endif %}
-      </p>
-      <ul class="tight-bullets">
-        {% for facet_value in facet_info.results %}
-          {% if not facet_value.selected %}
-            <li><a href="{{ facet_value.toggle_url }}" data-facet-value="{{ facet_value.value }}">{{ (facet_value.label | string()) or "-" }}</a> {{ "{:,}".format(facet_value.count) }}</li>
-          {% else %}
-            <li>{{ facet_value.label or "-" }} · {{ "{:,}".format(facet_value.count) }} <a href="{{ facet_value.toggle_url }}" class="cross">✖</a></li>
-          {% endif %}
-        {% endfor %}
-        {% if facet_info.truncated %}
-          <li class="facet-truncated">{% if request.args._facet_size != "max" -%}
-            <a href="{{ path_with_replaced_args(request, {"_facet_size": "max"}) }}">…</a>{% else -%}…{% endif %}
-          </li>
-        {% endif %}
-      </ul>
-    </div>
-  {% endfor %}
-</div>
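
For reference, the shape of the data this template iterated over, written out as an illustrative JavaScript literal (field names are taken from the template above; the values are invented):

// One entry of sorted_facet_results, as consumed by the removed template
const facetInfo = {
  name: "state",            // column (or facet) name
  type: "column",           // only non-"column" types get a suffix in the heading
  hideable: true,           // controls the ✖ remove link
  toggle_url: "/db/table?_facet=state",
  truncated: false,         // true renders the "…" show-more item
  results: [
    {value: "CA", label: "CA", count: 123, selected: false,
     toggle_url: "/db/table?_facet=state&state=CA"},
  ],
};
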
@@ -1,21 +0,0 @@
-Powered by <a href="https://datasette.io/" title="Datasette v{{ datasette_version }}">Datasette</a>
-{% if query_ms %}· Queries took {{ query_ms|round(3) }}ms{% endif %}
-{% if metadata %}
-  {% if metadata.license or metadata.license_url %}· Data license:
-    {% if metadata.license_url %}
-      <a href="{{ metadata.license_url }}">{{ metadata.license or metadata.license_url }}</a>
-    {% else %}
-      {{ metadata.license }}
-    {% endif %}
-  {% endif %}
-  {% if metadata.source or metadata.source_url %}·
-    Data source: {% if metadata.source_url %}
-      <a href="{{ metadata.source_url }}">
-    {% endif %}{{ metadata.source or metadata.source_url }}{% if metadata.source_url %}</a>{% endif %}
-  {% endif %}
-  {% if metadata.about or metadata.about_url %}·
-    About: {% if metadata.about_url %}
-      <a href="{{ metadata.about_url }}">
-    {% endif %}{{ metadata.about or metadata.about_url }}{% if metadata.about_url %}</a>{% endif %}
-  {% endif %}
-{% endif %}
@@ -1,145 +0,0 @@
-<style>
-.permission-form {
-  background-color: #f5f5f5;
-  border: 1px solid #ddd;
-  border-radius: 5px;
-  padding: 1.5em;
-  margin-bottom: 2em;
-}
-.form-section {
-  margin-bottom: 1em;
-}
-.form-section label {
-  display: block;
-  margin-bottom: 0.3em;
-  font-weight: bold;
-}
-.form-section input[type="text"],
-.form-section select {
-  width: 100%;
-  max-width: 500px;
-  padding: 0.5em;
-  box-sizing: border-box;
-  border: 1px solid #ccc;
-  border-radius: 3px;
-}
-.form-section input[type="text"]:focus,
-.form-section select:focus {
-  outline: 2px solid #0066cc;
-  border-color: #0066cc;
-}
-.form-section small {
-  display: block;
-  margin-top: 0.3em;
-  color: #666;
-}
-.form-actions {
-  margin-top: 1em;
-}
-.submit-btn {
-  padding: 0.6em 1.5em;
-  font-size: 1em;
-  background-color: #0066cc;
-  color: white;
-  border: none;
-  border-radius: 3px;
-  cursor: pointer;
-}
-.submit-btn:hover {
-  background-color: #0052a3;
-}
-.submit-btn:disabled {
-  background-color: #ccc;
-  cursor: not-allowed;
-}
-.results-container {
-  margin-top: 2em;
-}
-.results-header {
-  display: flex;
-  justify-content: space-between;
-  align-items: center;
-  margin-bottom: 1em;
-}
-.results-count {
-  font-size: 0.9em;
-  color: #666;
-}
-.results-table {
-  width: 100%;
-  border-collapse: collapse;
-  background-color: white;
-  box-shadow: 0 1px 3px rgba(0,0,0,0.1);
-}
-.results-table th {
-  background-color: #f5f5f5;
-  padding: 0.75em;
-  text-align: left;
-  font-weight: bold;
-  border-bottom: 2px solid #ddd;
-}
-.results-table td {
-  padding: 0.75em;
-  border-bottom: 1px solid #eee;
-}
-.results-table tr:hover {
-  background-color: #f9f9f9;
-}
-.results-table tr.allow-row {
-  background-color: #f1f8f4;
-}
-.results-table tr.allow-row:hover {
-  background-color: #e8f5e9;
-}
-.results-table tr.deny-row {
-  background-color: #fef5f5;
-}
-.results-table tr.deny-row:hover {
-  background-color: #ffebee;
-}
-.resource-path {
-  font-family: monospace;
-  background-color: #f5f5f5;
-  padding: 0.2em 0.4em;
-  border-radius: 3px;
-}
-.pagination {
-  margin-top: 1.5em;
-  display: flex;
-  gap: 1em;
-  align-items: center;
-}
-.pagination a {
-  padding: 0.5em 1em;
-  background-color: #0066cc;
-  color: white;
-  text-decoration: none;
-  border-radius: 3px;
-}
-.pagination a:hover {
-  background-color: #0052a3;
-}
-.pagination span {
-  color: #666;
-}
-.no-results {
-  padding: 2em;
-  text-align: center;
-  color: #666;
-  background-color: #f9f9f9;
-  border: 1px solid #ddd;
-  border-radius: 5px;
-}
-.error-message {
-  padding: 1em;
-  background-color: #ffebee;
-  border: 2px solid #f44336;
-  border-radius: 5px;
-  color: #c62828;
-}
-.loading {
-  padding: 2em;
-  text-align: center;
-  color: #666;
-}
-</style>
@@ -1,54 +0,0 @@
-{% if has_debug_permission %}
-{% set query_string = '?' + request.query_string if request.query_string else '' %}
-
-<style>
-.permissions-debug-tabs {
-  border-bottom: 2px solid #e0e0e0;
-  margin-bottom: 2em;
-  display: flex;
-  flex-wrap: wrap;
-  gap: 0.5em;
-}
-.permissions-debug-tabs a {
-  padding: 0.75em 1.25em;
-  text-decoration: none;
-  color: #333;
-  border-bottom: 3px solid transparent;
-  margin-bottom: -2px;
-  transition: all 0.2s;
-  font-weight: 500;
-}
-.permissions-debug-tabs a:hover {
-  background-color: #f5f5f5;
-  border-bottom-color: #999;
-}
-.permissions-debug-tabs a.active {
-  color: #0066cc;
-  border-bottom-color: #0066cc;
-  background-color: #f0f7ff;
-}
-@media only screen and (max-width: 576px) {
-  .permissions-debug-tabs {
-    flex-direction: column;
-    gap: 0;
-  }
-  .permissions-debug-tabs a {
-    border-bottom: 1px solid #e0e0e0;
-    margin-bottom: 0;
-  }
-  .permissions-debug-tabs a.active {
-    border-left: 3px solid #0066cc;
-    border-bottom: 1px solid #e0e0e0;
-  }
-}
-</style>
-
-<nav class="permissions-debug-tabs">
-  <a href="{{ urls.path('-/permissions') }}" {% if current_tab == "permissions" %}class="active"{% endif %}>Playground</a>
-  <a href="{{ urls.path('-/check') }}{{ query_string }}" {% if current_tab == "check" %}class="active"{% endif %}>Check</a>
-  <a href="{{ urls.path('-/allowed') }}{{ query_string }}" {% if current_tab == "allowed" %}class="active"{% endif %}>Allowed</a>
-  <a href="{{ urls.path('-/rules') }}{{ query_string }}" {% if current_tab == "rules" %}class="active"{% endif %}>Rules</a>
-  <a href="{{ urls.path('-/actions') }}" {% if current_tab == "actions" %}class="active"{% endif %}>Actions</a>
-  <a href="{{ urls.path('-/allow-debug') }}" {% if current_tab == "allow_debug" %}class="active"{% endif %}>Allow debug</a>
-</nav>
-{% endif %}
@@ -1,3 +0,0 @@
-<p class="suggested-facets">
-  Suggested facets: {% for facet in suggested_facets %}<a href="{{ facet.toggle_url }}#facet-{{ facet.name|to_css_class }}">{{ facet.name }}</a>{% if facet.get("type") %} ({{ facet.type }}){% endif %}{% if not loop.last %}, {% endif %}{% endfor %}
-</p>
@@ -1,36 +1,28 @@
-<!-- above-table-panel is a hook node for plugins to attach to . Displays even if no data available -->
-<div class="above-table-panel"> </div>
-{% if display_rows %}
-<div class="table-wrapper">
-  <table class="rows-and-columns">
-    <thead>
-      <tr>
-        {% for column in display_columns %}
-          <th {% if column.description %}data-column-description="{{ column.description }}" {% endif %}class="col-{{ column.name|to_css_class }}" scope="col" data-column="{{ column.name }}" data-column-type="{{ column.type.lower() }}" data-column-not-null="{{ column.notnull }}" data-is-pk="{% if column.is_pk %}1{% else %}0{% endif %}">
-            {% if not column.sortable %}
-              {{ column.name }}
-            {% else %}
-              {% if column.name == sort %}
-                <a href="{{ fix_path(path_with_replaced_args(request, {'_sort_desc': column.name, '_sort': None, '_next': None})) }}" rel="nofollow">{{ column.name }} ▼</a>
-              {% else %}
-                <a href="{{ fix_path(path_with_replaced_args(request, {'_sort': column.name, '_sort_desc': None, '_next': None})) }}" rel="nofollow">{{ column.name }}{% if column.name == sort_desc %} ▲{% endif %}</a>
-              {% endif %}
-            {% endif %}
-          </th>
-        {% endfor %}
-      </tr>
-    </thead>
-    <tbody>
-      {% for row in display_rows %}
-        <tr>
-          {% for cell in row %}
-            <td class="col-{{ cell.column|to_css_class }} type-{{ cell.value_type }}">{{ cell.value }}</td>
-          {% endfor %}
-        </tr>
-      {% endfor %}
-    </tbody>
-  </table>
-</div>
-{% else %}
-<p class="zero-results">0 records</p>
-{% endif %}
+<table class="rows-and-columns">
+  <thead>
+    <tr>
+      {% for column in display_columns %}
+        <th class="col-{{ column.name|to_css_class }}" scope="col">
+          {% if not column.sortable %}
+            {{ column.name }}
+          {% else %}
+            {% if column.name == sort %}
+              <a href="{{ path_with_replaced_args(request, {'_sort_desc': column.name, '_sort': None, '_next': None}) }}" rel="nofollow">{{ column.name }} ▼</a>
+            {% else %}
+              <a href="{{ path_with_replaced_args(request, {'_sort': column.name, '_sort_desc': None, '_next': None}) }}" rel="nofollow">{{ column.name }}{% if column.name == sort_desc %} ▲{% endif %}</a>
+            {% endif %}
+          {% endif %}
+        </th>
+      {% endfor %}
+    </tr>
+  </thead>
+  <tbody>
+    {% for row in display_rows %}
+      <tr>
+        {% for cell in row %}
+          <td class="col-{{ cell.column|to_css_class }}">{{ cell.value }}</td>
+        {% endfor %}
+      </tr>
+    {% endfor %}
+  </tbody>
+</table>
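
The sort links above are built server-side with path_with_replaced_args; the equivalent query-string manipulation, as an illustrative browser-side sketch (function and argument names are mine, not from the repo):

// Toggle a column between ascending and descending sort, clearing pagination
function toggleSort(search, column, currentSort) {
  const params = new URLSearchParams(search);
  params.delete('_next');                // drop the pagination cursor
  if (currentSort === column) {
    params.delete('_sort');
    params.set('_sort_desc', column);    // second click: descending
  } else {
    params.delete('_sort_desc');
    params.set('_sort', column);         // first click: ascending
  }
  return '?' + params.toString();
}
// toggleSort('?_sort=name&_next=abc', 'name', 'name') -> '?_sort_desc=name'
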
@@ -1,61 +0,0 @@
-{% extends "base.html" %}
-
-{% block title %}Debug allow rules{% endblock %}
-
-{% block extra_head %}
-<style>
-textarea {
-  height: 10em;
-  width: 95%;
-  box-sizing: border-box;
-  padding: 0.5em;
-  border: 2px dotted black;
-}
-.two-col {
-  display: inline-block;
-  width: 48%;
-}
-.two-col label {
-  width: 48%;
-}
-p.message-warning {
-  white-space: pre-wrap;
-}
-@media only screen and (max-width: 576px) {
-  .two-col {
-    width: 100%;
-  }
-}
-</style>
-{% endblock %}
-
-{% block content %}
-
-<h1>Debug allow rules</h1>
-
-{% set current_tab = "allow_debug" %}
-{% include "_permissions_debug_tabs.html" %}
-
-<p>Use this tool to try out different actor and allow combinations. See <a href="https://docs.datasette.io/en/stable/authentication.html#defining-permissions-with-allow-blocks">Defining permissions with "allow" blocks</a> for documentation.</p>
-
-<form class="core" action="{{ urls.path('-/allow-debug') }}" method="get" style="margin-bottom: 1em">
-  <div class="two-col">
-    <p><label>Allow block</label></p>
-    <textarea name="allow">{{ allow_input }}</textarea>
-  </div>
-  <div class="two-col">
-    <p><label>Actor</label></p>
-    <textarea name="actor">{{ actor_input }}</textarea>
-  </div>
-  <div style="margin-top: 1em;">
-    <input type="submit" value="Apply allow block to actor">
-  </div>
-</form>
-
-{% if error %}<p class="message-warning">{{ error }}</p>{% endif %}
-
-{% if result == "True" %}<p class="message-info">Result: allow</p>{% endif %}
-
-{% if result == "False" %}<p class="message-error">Result: deny</p>{% endif %}
-
-{% endblock %}
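
An illustrative input pair for this tool, grounded in the allow-block documentation linked above (the values are my own example, not taken from the diff):

// Allow block: grant access to actors whose id is "root" or "simon"
const allowBlock = {"id": ["root", "simon"]};
// Actor: matches the block above, so the tool reports "Result: allow"
const actor = {"id": "simon"};
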
@@ -1,208 +0,0 @@
-{% extends "base.html" %}
-
-{% block title %}API Explorer{% endblock %}
-
-{% block extra_head %}
-<script src="{{ base_url }}-/static/json-format-highlight-1.0.1.js"></script>
-{% endblock %}
-
-{% block content %}
-
-<h1>API Explorer{% if private %} 🔒{% endif %}</h1>
-
-<p>Use this tool to try out the
-  {% if datasette_version %}
-    <a href="https://docs.datasette.io/en/{{ datasette_version }}/json_api.html">Datasette API</a>.
-  {% else %}
-    Datasette API.
-  {% endif %}
-</p>
-<details open style="border: 2px solid #ccc; border-bottom: none; padding: 0.5em">
-  <summary style="cursor: pointer;">GET</summary>
-  <form class="core" method="get" id="api-explorer-get" style="margin-top: 0.7em">
-    <div>
-      <label for="path">API path:</label>
-      <input type="text" id="path" name="path" style="width: 60%">
-      <input type="submit" value="GET">
-    </div>
-  </form>
-</details>
-<details style="border: 2px solid #ccc; padding: 0.5em">
-  <summary style="cursor: pointer">POST</summary>
-  <form class="core" method="post" id="api-explorer-post" style="margin-top: 0.7em">
-    <div>
-      <label for="path">API path:</label>
-      <input type="text" id="path" name="path" style="width: 60%">
-    </div>
-    <div style="margin: 0.5em 0">
-      <label for="apiJson" style="vertical-align: top">JSON:</label>
-      <textarea id="apiJson" name="json" style="width: 60%; height: 200px; font-family: monospace; font-size: 0.8em;"></textarea>
-    </div>
-    <p><button id="json-format" type="button">Format JSON</button> <input type="submit" value="POST"></p>
-  </form>
-</details>
-
-<div id="output" style="display: none">
-  <h2>API response: HTTP <span id="response-status"></span></h2>
-  <ul class="errors message-error"></ul>
-  <pre></pre>
-</div>
-
-<script>
-document.querySelector('#json-format').addEventListener('click', (ev) => {
-  ev.preventDefault();
-  let json = document.querySelector('textarea[name="json"]').value.trim();
-  if (!json) {
-    return;
-  }
-  try {
-    const parsed = JSON.parse(json);
-    document.querySelector('textarea[name="json"]').value = JSON.stringify(parsed, null, 2);
-  } catch (e) {
-    alert("Error parsing JSON: " + e);
-  }
-});
-var postForm = document.getElementById('api-explorer-post');
-var getForm = document.getElementById('api-explorer-get');
-var output = document.getElementById('output');
-var errorList = output.querySelector('.errors');
-
-// On first load or fragment change populate forms from # in URL, if present
-if (window.location.hash) {
-  onFragmentChange();
-}
-function onFragmentChange() {
-  var hash = window.location.hash.slice(1);
-  // Treat hash as a foo=bar string and parse it:
-  var params = new URLSearchParams(hash);
-  var method = params.get('method');
-  if (method == 'GET') {
-    getForm.closest('details').open = true;
-    postForm.closest('details').open = false;
-    getForm.querySelector('input[name="path"]').value = params.get('path');
-  } else if (method == 'POST') {
-    postForm.closest('details').open = true;
-    getForm.closest('details').open = false;
-    postForm.querySelector('input[name="path"]').value = params.get('path');
-    postForm.querySelector('textarea[name="json"]').value = params.get('json');
-  }
-}
-window.addEventListener('hashchange', () => {
-  onFragmentChange();
-  // Animate scroll to top of page
-  window.scrollTo({top: 0, behavior: 'smooth'});
-});
-
-// Cause GET and POST regions to toggle each other
-var getDetails = getForm.closest('details');
-var postDetails = postForm.closest('details');
-getDetails.addEventListener('toggle', (ev) => {
-  if (getDetails.open) {
-    postDetails.open = false;
-  }
-});
-postDetails.addEventListener('toggle', (ev) => {
-  if (postDetails.open) {
-    getDetails.open = false;
-  }
-});
-
-getForm.addEventListener("submit", (ev) => {
-  ev.preventDefault();
-  var formData = new FormData(getForm);
-  // Update URL fragment hash
-  var serialized = new URLSearchParams(formData).toString() + '&method=GET';
-  window.history.pushState({}, "", location.pathname + '#' + serialized);
-  // Send the request
-  var path = formData.get('path');
-  fetch(path, {
-    method: 'GET',
-    headers: {
-      'Accept': 'application/json',
-    }
-  }).then((response) => {
-    output.style.display = 'block';
-    document.getElementById('response-status').textContent = response.status;
-    return response.json();
-  }).then((data) => {
-    output.querySelector('pre').innerHTML = jsonFormatHighlight(data);
-    errorList.style.display = 'none';
-  }).catch((error) => {
-    alert(error);
-  });
-});
-
-postForm.addEventListener("submit", (ev) => {
-  ev.preventDefault();
-  var formData = new FormData(postForm);
-  // Update URL fragment hash
-  var serialized = new URLSearchParams(formData).toString() + '&method=POST';
-  window.history.pushState({}, "", location.pathname + '#' + serialized);
-  // Send the request
-  var json = formData.get('json');
-  var path = formData.get('path');
-  // Validate JSON
-  if (!json.length) {
-    json = '{}';
-  }
-  try {
-    var data = JSON.parse(json);
-  } catch (err) {
-    alert("Invalid JSON: " + err);
-    return;
-  }
-  // POST JSON to path with content-type application/json
-  fetch(path, {
-    method: 'POST',
-    body: json,
-    headers: {
-      'Content-Type': 'application/json',
-    }
-  }).then(r => {
-    document.getElementById('response-status').textContent = r.status;
-    return r.json();
-  }).then(data => {
-    if (data.errors) {
-      errorList.style.display = 'block';
-      errorList.innerHTML = '';
-      data.errors.forEach(error => {
-        var li = document.createElement('li');
-        li.textContent = error;
-        errorList.appendChild(li);
-      });
-    } else {
-      errorList.style.display = 'none';
-    }
-    output.querySelector('pre').innerHTML = jsonFormatHighlight(data);
-    output.style.display = 'block';
-  }).catch(err => {
-    alert("Error: " + err);
-  });
-});
-</script>
-
-{% if example_links %}
-<h2>API endpoints</h2>
-<ul class="bullets">
-  {% for database in example_links %}
-    <li>Database: <strong>{{ database.name }}</strong></li>
-    <ul class="bullets">
-      {% for link in database.links %}
-        <li><a href="{{ api_path(link) }}">{{ link.path }}</a> - {{ link.label }} </li>
-      {% endfor %}
-      {% for table in database.tables %}
-        <li><strong>{{ table.name }}</strong>
-          <ul class="bullets">
-            {% for link in table.links %}
-              <li><a href="{{ api_path(link) }}">{{ link.path }}</a> - {{ link.label }} </li>
-            {% endfor %}
-          </ul>
-        </li>
-      {% endfor %}
-    </ul>
-  {% endfor %}
-</ul>
-{% endif %}
-
-{% endblock %}
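
The explorer above persists form state in the URL fragment as ordinary query-string pairs. A standalone sketch of that round trip (illustrative only; the path value is invented):

// Encode: what the submit handlers push into location.hash
const state = new URLSearchParams({path: '/fixtures/facetable.json'});
const fragment = state.toString() + '&method=GET'; // "path=%2Ffixtures%2Ffacetable.json&method=GET"

// Decode: what onFragmentChange() recovers after a reload or back-button
const params = new URLSearchParams(fragment);
console.log(params.get('method'));  // "GET"
console.log(params.get('path'));    // "/fixtures/facetable.json"
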
@@ -1,78 +1,50 @@
-{% import "_crumbs.html" as crumbs with context %}<!DOCTYPE html>
-<html lang="en">
+<!DOCTYPE html>
+<html>
 <head>
     <title>{% block title %}{% endblock %}</title>
-    <link rel="stylesheet" href="{{ urls.static('app.css') }}?{{ app_css_hash }}">
+    <link rel="stylesheet" href="/-/static/app.css?{{ app_css_hash }}">
     <meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
    {% for url in extra_css_urls %}
-        <link rel="stylesheet" href="{{ url.url }}"{% if url.get("sri") %} integrity="{{ url.sri }}" crossorigin="anonymous"{% endif %}>
+        <link rel="stylesheet" href="{{ url.url }}"{% if url.sri %} integrity="{{ url.sri }}" crossorigin="anonymous"{% endif %}>
    {% endfor %}
-    <script>window.datasetteVersion = '{{ datasette_version }}';</script>
-    <script src="{{ urls.static('datasette-manager.js') }}" defer></script>
    {% for url in extra_js_urls %}
-        <script {% if url.module %}type="module" {% endif %}src="{{ url.url }}"{% if url.get("sri") %} integrity="{{ url.sri }}" crossorigin="anonymous"{% endif %}></script>
+        <script src="{{ url.url }}"{% if url.sri %} integrity="{{ url.sri }}" crossorigin="anonymous"{% endif %}></script>
    {% endfor %}
-    {%- if alternate_url_json -%}
-    <link rel="alternate" type="application/json+datasette" href="{{ alternate_url_json }}">
-    {%- endif -%}
-    {%- block extra_head %}{% endblock -%}
+    {% block extra_head %}{% endblock %}
 </head>
 <body class="{% block body_class %}{% endblock %}">
-<div class="not-footer">
-<header class="hd"><nav>{% block nav %}{% block crumbs %}{{ crumbs.nav(request=request) }}{% endblock %}
-{% set links = menu_links() %}{% if links or show_logout %}
-  <details class="nav-menu details-menu">
-    <summary><svg aria-labelledby="nav-menu-svg-title" role="img"
-        fill="currentColor" stroke="currentColor" xmlns="http://www.w3.org/2000/svg"
-        viewBox="0 0 16 16" width="16" height="16">
-      <title id="nav-menu-svg-title">Menu</title>
-      <path fill-rule="evenodd" d="M1 2.75A.75.75 0 011.75 2h12.5a.75.75 0 110 1.5H1.75A.75.75 0 011 2.75zm0 5A.75.75 0 011.75 7h12.5a.75.75 0 110 1.5H1.75A.75.75 0 011 7.75zM1.75 12a.75.75 0 100 1.5h12.5a.75.75 0 100-1.5H1.75z"></path>
-    </svg></summary>
-    <div class="nav-menu-inner">
-      {% if links %}
-        <ul>
-          {% for link in links %}
-            <li><a href="{{ link.href }}">{{ link.label }}</a></li>
-          {% endfor %}
-        </ul>
-      {% endif %}
-      {% if show_logout %}
-        <form class="nav-menu-logout" action="{{ urls.logout() }}" method="post">
-          <input type="hidden" name="csrftoken" value="{{ csrftoken() }}">
-          <button class="button-as-link">Log out</button>
-        </form>{% endif %}
-    </div>
-  </details>{% endif %}
-{% if actor %}
-  <div class="actor">
-    <strong>{{ display_actor(actor) }}</strong>
-  </div>
-{% endif %}
-{% endblock %}</nav></header>
-
-{% block messages %}
-{% if show_messages %}
-  {% for message, message_type in show_messages() %}
-    <p class="message-{% if message_type == 1 %}info{% elif message_type == 2 %}warning{% elif message_type == 3 %}error{% endif %}">{{ message }}</p>
-  {% endfor %}
-{% endif %}
-{% endblock %}
-
-<section class="content">
 {% block content %}
 {% endblock %}
-</section>
-</div>
-<footer class="ft">{% block footer %}{% include "_footer.html" %}{% endblock %}</footer>
-
-{% include "_close_open_menus.html" %}
+<div class="ft">
+    Powered by <a href="https://github.com/simonw/datasette" title="Datasette v{{ datasette_version }}">Datasette</a>
+    {% if query_ms %}· Query took {{ query_ms|round(3) }}ms{% endif %}
+    {% if metadata %}
+        {% if metadata.license or metadata.license_url %}· Data license:
+            {% if metadata.license_url %}
+                <a href="{{ metadata.license_url }}">{{ metadata.license or metadata.license_url }}</a>
+            {% else %}
+                {{ metadata.license }}
+            {% endif %}
+        {% endif %}
+        {% if metadata.source or metadata.source_url %}·
+            Data source: {% if metadata.source_url %}
+                <a href="{{ metadata.source_url }}">
+            {% endif %}{{ metadata.source or metadata.source_url }}{% if metadata.source_url %}</a>{% endif %}
+        {% endif %}
+        {% if metadata.about or metadata.about_url %}·
+            About: {% if metadata.about_url %}
+                <a href="{{ metadata.about_url }}">
+            {% endif %}{{ metadata.about or metadata.about_url }}{% if metadata.about_url %}</a>{% endif %}
+        {% endif %}
+    {% endif %}
+</div>
 
 {% for body_script in body_scripts %}
-    <script{% if body_script.module %} type="module"{% endif %}>{{ body_script.script }}</script>
+    <script>{{ body_script }}</script>
 {% endfor %}
 
 {% if select_templates %}<!-- Templates considered: {{ select_templates|join(", ") }} -->{% endif %}
-<script src="{{ urls.static('navigation-search.js') }}" defer></script>
-<navigation-search url="/-/tables"></navigation-search>
 </body>
 </html>
@@ -1,124 +0,0 @@
-{% extends "base.html" %}
-
-{% block title %}Create an API token{% endblock %}
-
-{% block extra_head %}
-<style type="text/css">
-#restrict-permissions label {
-  display: inline;
-  width: 90%;
-}
-</style>
-{% endblock %}
-
-{% block content %}
-
-<h1>Create an API token</h1>
-
-<p>This token will allow API access with the same abilities as your current user, <strong>{{ request.actor.id }}</strong></p>
-
-{% if token %}
-  <div>
-    <h2>Your API token</h2>
-    <form>
-      <input type="text" class="copyable" style="width: 40%" value="{{ token }}">
-      <span class="copy-link-wrapper"></span>
-    </form>
-    <!--- show token in a <details> -->
-    <details style="margin-top: 1em">
-      <summary>Token details</summary>
-      <pre>{{ token_bits|tojson(4) }}</pre>
-    </details>
-  </div>
-  <h2>Create another token</h2>
-{% endif %}
-
-{% if errors %}
-  {% for error in errors %}
-    <p class="message-error">{{ error }}</p>
-  {% endfor %}
-{% endif %}
-
-<form class="core" action="{{ urls.path('-/create-token') }}" method="post">
-  <div>
-    <div class="select-wrapper" style="width: unset">
-      <select name="expire_type">
-        <option value="">Token never expires</option>
-        <option value="minutes">Expires after X minutes</option>
-        <option value="hours">Expires after X hours</option>
-        <option value="days">Expires after X days</option>
-      </select>
-    </div>
-    <input type="text" name="expire_duration" style="width: 10%">
-    <input type="hidden" name="csrftoken" value="{{ csrftoken() }}">
-    <input type="submit" value="Create token">
-
-    <details style="margin-top: 1em" id="restrict-permissions">
-      <summary style="cursor: pointer;">Restrict actions that can be performed using this token</summary>
-      <h2>All databases and tables</h2>
-      <ul>
-        {% for permission in all_actions %}
-          <li><label><input type="checkbox" name="all:{{ permission }}"> {{ permission }}</label></li>
-        {% endfor %}
-      </ul>
-
-      {% for database in database_with_tables %}
-        <h2>All tables in "{{ database.name }}"</h2>
-        <ul>
-          {% for permission in database_actions %}
-            <li><label><input type="checkbox" name="database:{{ database.encoded }}:{{ permission }}"> {{ permission }}</label></li>
-          {% endfor %}
-        </ul>
-      {% endfor %}
-      <h2>Specific tables</h2>
-      {% for database in database_with_tables %}
-        {% for table in database.tables %}
-          <h3>{{ database.name }}: {{ table.name }}</h3>
-          <ul>
-            {% for permission in child_actions %}
-              <li><label><input type="checkbox" name="resource:{{ database.encoded }}:{{ table.encoded }}:{{ permission }}"> {{ permission }}</label></li>
-            {% endfor %}
-          </ul>
-        {% endfor %}
-      {% endfor %}
-    </details>
-
-  </form>
-</div>
-
-<script>
-var expireDuration = document.querySelector('input[name="expire_duration"]');
-expireDuration.style.display = 'none';
-var expireType = document.querySelector('select[name="expire_type"]');
-function showHideExpireDuration() {
-  if (expireType.value) {
-    expireDuration.style.display = 'inline';
-    expireDuration.setAttribute("placeholder", expireType.value.replace("Expires after X ", ""));
-  } else {
-    expireDuration.style.display = 'none';
-  }
-}
-showHideExpireDuration();
-expireType.addEventListener('change', showHideExpireDuration);
-var copyInput = document.querySelector(".copyable");
-if (copyInput) {
-  var wrapper = document.querySelector(".copy-link-wrapper");
-  var button = document.createElement("button");
-  button.className = "copyable-copy-button";
-  button.setAttribute("type", "button");
-  button.innerHTML = "Copy to clipboard";
-  button.onclick = (ev) => {
-    ev.preventDefault();
-    copyInput.select();
-    document.execCommand("copy");
-    button.innerHTML = "Copied!";
-    setTimeout(() => {
-      button.innerHTML = "Copy to clipboard";
-    }, 1500);
-  };
-  wrapper.appendChild(button);
-  wrapper.insertAdjacentElement("afterbegin", button);
-}
-</script>
-
-{% endblock %}
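
One note on the copy button above: document.execCommand("copy") is deprecated in current browsers. A hedged modern equivalent (my sketch, not code from the repo), which also avoids the redundant appendChild/insertAdjacentElement pair:

// Copy the token using the async Clipboard API (requires a secure context)
async function copyToken(copyInput, button) {
  try {
    await navigator.clipboard.writeText(copyInput.value);
    button.textContent = "Copied!";
    setTimeout(() => { button.textContent = "Copy to clipboard"; }, 1500);
  } catch (err) {
    copyInput.select();  // fall back to manual selection
  }
}
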
@@ -1,13 +0,0 @@
-{% extends "base.html" %}
-{% block title %}CSRF check failed){% endblock %}
-{% block content %}
-<h1>Form origin check failed</h1>
-
-<p>Your request's origin could not be validated. Please return to the form and submit it again.</p>
-
-<details><summary>Technical details</summary>
-<p>Developers: consult Datasette's <a href="https://docs.datasette.io/en/latest/internals.html#csrf-protection">CSRF protection documentation</a>.</p>
-<p>Error code is {{ message_name }}.</p>
-</details>
-
-{% endblock %}
@@ -3,87 +3,61 @@
 {% block title %}{{ database }}{% endblock %}
 
 {% block extra_head %}
-{{- super() -}}
+{{ super() }}
 {% include "_codemirror.html" %}
 {% endblock %}
 
 {% block body_class %}db db-{{ database|to_css_class }}{% endblock %}
 
-{% block crumbs %}
-{{ crumbs.nav(request=request, database=database) }}
-{% endblock %}
-
 {% block content %}
-<div class="page-header" style="border-color: #{{ database_color }}">
-  <h1>{{ metadata.title or database }}{% if private %} 🔒{% endif %}</h1>
-</div>
-{% set action_links, action_title = database_actions(), "Database actions" %}
-{% include "_action_menu.html" %}
-{{ top_database() }}
+<div class="hd"><a href="/">home</a></div>
+<h1 style="padding-left: 10px; border-left: 10px solid #{{ database_color(database) }}">{{ metadata.title or database }}</h1>
 
 {% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %}
 
-{% if allow_execute_sql %}
-  <form class="sql core" action="{{ urls.database(database) }}/-/query" method="get">
+{% if config.allow_sql %}
+  <form class="sql" action="{{ database_url(database) }}" method="get">
     <h3>Custom SQL query</h3>
-    <p><textarea id="sql-editor" name="sql">{% if tables %}select * from {{ tables[0].name|escape_sqlite }}{% else %}select sqlite_version(){% endif %}</textarea></p>
-    <p>
-      <button id="sql-format" type="button" hidden>Format SQL</button>
-      <input type="submit" value="Run SQL">
-    </p>
+    <p><textarea name="sql">{% if tables %}select * from {{ tables[0].name|escape_sqlite }}{% else %}select sqlite_version(){% endif %}</textarea></p>
+    <p><input type="submit" value="Run SQL"></p>
  </form>
 {% endif %}
 
-{% if attached_databases %}
-  <div class="message-info">
-    <p>The following databases are attached to this connection, and can be used for cross-database joins:</p>
-    <ul class="bullets">
-      {% for db_name in attached_databases %}
-        <li><strong>{{ db_name }}</strong> - <a href="{{ urls.database(db_name) }}/-/query?sql=select+*+from+[{{ db_name }}].sqlite_master+where+type='table'">tables</a></li>
-      {% endfor %}
-    </ul>
-  </div>
-{% endif %}
-
-{% if queries %}
-  <h2 id="queries">Queries</h2>
-  <ul class="bullets">
-    {% for query in queries %}
-      <li><a href="{{ urls.query(database, query.name) }}{% if query.fragment %}#{{ query.fragment }}{% endif %}" title="{{ query.description or query.sql }}">{{ query.title or query.name }}</a>{% if query.private %} 🔒{% endif %}</li>
-    {% endfor %}
-  </ul>
-{% endif %}
-
-{% if tables %}
-  <h2 id="tables">Tables <a style="font-weight: normal; font-size: 0.75em; padding-left: 0.5em;" href="{{ urls.database(database) }}/-/schema">schema</a></h2>
-{% endif %}
-
 {% for table in tables %}
 {% if show_hidden or not table.hidden %}
   <div class="db-table">
-    <h3><a href="{{ urls.table(database, table.name) }}">{{ table.name }}</a>{% if table.private %} 🔒{% endif %}{% if table.hidden %}<em> (hidden)</em>{% endif %}</h3>
-    <p><em>{% for column in table.columns %}{{ column }}{% if not loop.last %}, {% endif %}{% endfor %}</em></p>
-    <p>{% if table.count is none %}Many rows{% elif table.count == count_limit + 1 %}>{{ "{:,}".format(count_limit) }} rows{% else %}{{ "{:,}".format(table.count) }} row{% if table.count == 1 %}{% else %}s{% endif %}{% endif %}</p>
+    <h2><a href="{{ database_url(database) }}/{{ table.name|quote_plus }}">{{ table.name }}</a>{% if table.hidden %}<em> (hidden)</em>{% endif %}</h2>
+    <p><em>{% for column in table.columns[:9] %}{{ column }}{% if not loop.last %}, {% endif %}{% endfor %}{% if table.columns|length > 9 %}...{% endif %}</em></p>
+    <p>{% if table.count is none %}Many rows{% else %}{{ "{:,}".format(table.count) }} row{% if table.count == 1 %}{% else %}s{% endif %}{% endif %}</p>
  </div>
 {% endif %}
 {% endfor %}
 
 {% if hidden_count and not show_hidden %}
-  <p>... and <a href="{{ urls.database(database) }}?_show_hidden=1">{{ "{:,}".format(hidden_count) }} hidden table{% if hidden_count == 1 %}{% else %}s{% endif %}</a></p>
+  <p>... and <a href="{{ database_url(database) }}?_show_hidden=1">{{ "{:,}".format(hidden_count) }} hidden table{% if hidden_count == 1 %}{% else %}s{% endif %}</a></p>
 {% endif %}
 
 {% if views %}
-  <h2 id="views">Views</h2>
-  <ul class="bullets">
+  <h2>Views</h2>
+  <ul>
    {% for view in views %}
-      <li><a href="{{ urls.database(database) }}/{{ view.name|urlencode }}">{{ view.name }}</a>{% if view.private %} 🔒{% endif %}</li>
+      <li><a href="{{ database_url(database) }}/{{ view|urlencode }}">{{ view }}</a></li>
+    {% endfor %}
+  </ul>
+{% endif %}
+
+{% if queries %}
+  <h2>Queries</h2>
+  <ul>
+    {% for query in queries %}
+      <li><a href="{{ database_url(database) }}/{{ query.name|urlencode }}" title="{{ query.description or query.sql }}">{{ query.title or query.name }}</a></li>
    {% endfor %}
  </ul>
 {% endif %}
 
 {% if allow_download %}
-  <p class="download-sqlite">Download SQLite DB: <a href="{{ urls.database(database) }}.db" rel="nofollow">{{ database }}.db</a> <em>{{ format_bytes(size) }}</em></p>
+  <p class="download-sqlite">Download SQLite DB: <a href="{{ database_url(database) }}.db">{{ database }}.db</a> <em>{{ format_bytes(size) }}</em></p>
 {% endif %}
 
 {% include "_codemirror_foot.html" %}
@@ -1,43 +0,0 @@
-{% extends "base.html" %}
-
-{% block title %}Registered Actions{% endblock %}
-
-{% block content %}
-<h1>Registered actions</h1>
-
-{% set current_tab = "actions" %}
-{% include "_permissions_debug_tabs.html" %}
-
-<p style="margin-bottom: 2em;">
-  This Datasette instance has registered {{ data|length }} action{{ data|length != 1 and "s" or "" }}.
-  Actions are used by the permission system to control access to different features.
-</p>
-
-<table class="rows-and-columns">
-  <thead>
-    <tr>
-      <th>Name</th>
-      <th>Abbr</th>
-      <th>Description</th>
-      <th>Resource</th>
-      <th>Takes Parent</th>
-      <th>Takes Child</th>
-      <th>Also Requires</th>
-    </tr>
-  </thead>
-  <tbody>
-    {% for action in data %}
-      <tr>
-        <td><strong>{{ action.name }}</strong></td>
-        <td>{% if action.abbr %}<code>{{ action.abbr }}</code>{% endif %}</td>
-        <td>{{ action.description or "" }}</td>
-        <td>{% if action.resource_class %}<code>{{ action.resource_class }}</code>{% endif %}</td>
-        <td>{% if action.takes_parent %}✓{% endif %}</td>
-        <td>{% if action.takes_child %}✓{% endif %}</td>
-        <td>{% if action.also_requires %}<code>{{ action.also_requires }}</code>{% endif %}</td>
-      </tr>
-    {% endfor %}
-  </tbody>
-</table>
-
-{% endblock %}
@@ -1,229 +0,0 @@
-{% extends "base.html" %}
-
-{% block title %}Allowed Resources{% endblock %}
-
-{% block extra_head %}
-<script src="{{ base_url }}-/static/json-format-highlight-1.0.1.js"></script>
-{% include "_permission_ui_styles.html" %}
-{% include "_debug_common_functions.html" %}
-{% endblock %}
-
-{% block content %}
-<h1>Allowed resources</h1>
-
-{% set current_tab = "allowed" %}
-{% include "_permissions_debug_tabs.html" %}
-
-<p>Use this tool to check which resources the current actor is allowed to access for a given permission action. It queries the <code>/-/allowed.json</code> API endpoint.</p>
-
-{% if request.actor %}
-  <p>Current actor: <strong>{{ request.actor.get("id", "anonymous") }}</strong></p>
-{% else %}
-  <p>Current actor: <strong>anonymous (not logged in)</strong></p>
-{% endif %}
-
-<div class="permission-form">
-  <form id="allowed-form" method="get" action="{{ urls.path("-/allowed") }}">
-    <div class="form-section">
-      <label for="action">Action (permission name):</label>
-      <select id="action" name="action" required>
-        <option value="">Select an action...</option>
-        {% for action_name in supported_actions %}
-          <option value="{{ action_name }}">{{ action_name }}</option>
-        {% endfor %}
-      </select>
-      <small>Only certain actions are supported by this endpoint</small>
-    </div>
-
-    <div class="form-section">
-      <label for="parent">Filter by parent (optional):</label>
-      <input type="text" id="parent" name="parent" placeholder="e.g., database name">
-      <small>Filter results to a specific parent resource</small>
-    </div>
-
-    <div class="form-section">
-      <label for="child">Filter by child (optional):</label>
-      <input type="text" id="child" name="child" placeholder="e.g., table name">
-      <small>Filter results to a specific child resource (requires parent to be set)</small>
-    </div>
-
-    <div class="form-section">
-      <label for="page_size">Page size:</label>
-      <input type="number" id="page_size" name="page_size" value="50" min="1" max="200" style="max-width: 100px;">
-      <small>Number of results per page (max 200)</small>
-    </div>
-
-    <div class="form-actions">
-      <button type="submit" class="submit-btn" id="submit-btn">Check Allowed Resources</button>
-    </div>
-  </form>
-</div>
-
-<div id="results-container" style="display: none;">
-  <div class="results-header">
-    <h2>Results</h2>
-    <div class="results-count" id="results-count"></div>
-  </div>
-
-  <div id="results-content"></div>
-
-  <div id="pagination" class="pagination"></div>
-
-  <details style="margin-top: 2em;">
-    <summary style="cursor: pointer; font-weight: bold;">Raw JSON response</summary>
-    <pre id="raw-json" style="margin-top: 1em; padding: 1em; background-color: #f5f5f5; border: 1px solid #ddd; border-radius: 3px; overflow-x: auto;"></pre>
-  </details>
-</div>
-
-<script>
-const form = document.getElementById('allowed-form');
-const resultsContainer = document.getElementById('results-container');
-const resultsContent = document.getElementById('results-content');
-const resultsCount = document.getElementById('results-count');
-const pagination = document.getElementById('pagination');
-const submitBtn = document.getElementById('submit-btn');
-const hasDebugPermission = {{ 'true' if has_debug_permission else 'false' }};
-
-// Populate form on initial load
-(function() {
-  const params = populateFormFromURL();
-  const action = params.get('action');
-  const page = params.get('page');
-  if (action) {
-    fetchResults(page ? parseInt(page) : 1);
-  }
-})();
-
-async function fetchResults(page = 1) {
-  submitBtn.disabled = true;
-  submitBtn.textContent = 'Loading...';
-
-  const formData = new FormData(form);
-  const params = new URLSearchParams();
-
-  for (const [key, value] of formData.entries()) {
-    if (value && key !== 'page_size') {
-      params.append(key, value);
-    }
-  }
-
-  const pageSize = document.getElementById('page_size').value || '50';
-  params.append('page', page.toString());
-  params.append('page_size', pageSize);
-
-  try {
-    const response = await fetch('{{ urls.path("-/allowed.json") }}?' + params.toString(), {
-      method: 'GET',
-      headers: {
-        'Accept': 'application/json',
-      }
-    });
-
-    const data = await response.json();
-
-    if (response.ok) {
-      displayResults(data);
-    } else {
-      displayError(data);
-    }
-  } catch (error) {
-    displayError({ error: error.message });
-  } finally {
-    submitBtn.disabled = false;
-    submitBtn.textContent = 'Check Allowed Resources';
-  }
-}
-
-function displayResults(data) {
-  resultsContainer.style.display = 'block';
-
-  // Update count
-  resultsCount.textContent = `Showing ${data.items.length} of ${data.total} total resources (page ${data.page})`;
-
-  // Display results table
-  if (data.items.length === 0) {
-    resultsContent.innerHTML = '<div class="no-results">No allowed resources found for this action.</div>';
-  } else {
-    let html = '<table class="results-table">';
-    html += '<thead><tr>';
-    html += '<th>Resource Path</th>';
-    html += '<th>Parent</th>';
-    html += '<th>Child</th>';
-    if (hasDebugPermission) {
-      html += '<th>Reason</th>';
-    }
-    html += '</tr></thead>';
-    html += '<tbody>';
-
-    for (const item of data.items) {
-      html += '<tr>';
-      html += `<td><span class="resource-path">${escapeHtml(item.resource || '/')}</span></td>`;
-      html += `<td>${escapeHtml(item.parent || '—')}</td>`;
-      html += `<td>${escapeHtml(item.child || '—')}</td>`;
-      if (hasDebugPermission) {
-        // Display reason as JSON array
-        let reasonHtml = '—';
-        if (item.reason && Array.isArray(item.reason)) {
-          reasonHtml = `<code>${escapeHtml(JSON.stringify(item.reason))}</code>`;
-        }
-        html += `<td>${reasonHtml}</td>`;
-      }
-      html += '</tr>';
-    }
-
-    html += '</tbody></table>';
-    resultsContent.innerHTML = html;
-  }
-
-  // Update pagination
-  pagination.innerHTML = '';
-  if (data.previous_url || data.next_url) {
-    if (data.previous_url) {
-      const prevLink = document.createElement('a');
-      prevLink.href = data.previous_url;
-      prevLink.textContent = '← Previous';
-      pagination.appendChild(prevLink);
-    }
-
-    const pageInfo = document.createElement('span');
-    pageInfo.textContent = `Page ${data.page}`;
-    pagination.appendChild(pageInfo);
-
-    if (data.next_url) {
-      const nextLink = document.createElement('a');
-      nextLink.href = data.next_url;
-      nextLink.textContent = 'Next →';
-      pagination.appendChild(nextLink);
-    }
-  }
-
-  // Update raw JSON
-  document.getElementById('raw-json').innerHTML = jsonFormatHighlight(data);
-}
-
-function displayError(data) {
-  resultsContainer.style.display = 'block';
-  resultsCount.textContent = '';
-  pagination.innerHTML = '';
-
-  resultsContent.innerHTML = `<div class="error-message">Error: ${escapeHtml(data.error || 'Unknown error')}</div>`;
-
-  document.getElementById('raw-json').innerHTML = jsonFormatHighlight(data);
-}
-
-// Disable child input if parent is empty
-const parentInput = document.getElementById('parent');
-const childInput = document.getElementById('child');
-
-parentInput.addEventListener('input', () => {
-  childInput.disabled = !parentInput.value;
-  if (!parentInput.value) {
-    childInput.value = '';
-  }
-});
-
-// Initialize disabled state
-childInput.disabled = !parentInput.value;
-</script>
-
-{% endblock %}
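
Calling the same endpoint directly, outside the page, as an illustrative sketch (the action and database names are invented for the example):

// Ask /-/allowed.json which resources the current actor may access
const qs = new URLSearchParams({
  action: 'view-table',   // hypothetical action name
  parent: 'fixtures',     // optional database filter
  page: '1',
  page_size: '50',
});
fetch('/-/allowed.json?' + qs, {headers: {Accept: 'application/json'}})
  .then(r => r.json())
  .then(data => console.log(data.total, data.items));
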
@ -1,270 +0,0 @@
|
||||||
{% extends "base.html" %}
|
|
||||||
|
|
||||||
{% block title %}Permission Check{% endblock %}
|
|
||||||
|
|
||||||
{% block extra_head %}
|
|
||||||
<script src="{{ base_url }}-/static/json-format-highlight-1.0.1.js"></script>
|
|
||||||
{% include "_permission_ui_styles.html" %}
|
|
||||||
{% include "_debug_common_functions.html" %}
|
|
||||||
<style>
|
|
||||||
#output {
|
|
||||||
margin-top: 2em;
|
|
||||||
padding: 1em;
|
|
||||||
border-radius: 5px;
|
|
||||||
}
|
|
||||||
#output.allowed {
|
|
||||||
background-color: #e8f5e9;
|
|
||||||
border: 2px solid #4caf50;
|
|
||||||
}
|
|
||||||
#output.denied {
|
|
||||||
background-color: #ffebee;
|
|
||||||
border: 2px solid #f44336;
|
|
||||||
}
|
|
||||||
#output h2 {
|
|
||||||
margin-top: 0;
|
|
||||||
}
|
|
||||||
#output .result-badge {
|
|
||||||
display: inline-block;
|
|
||||||
padding: 0.3em 0.8em;
|
|
||||||
border-radius: 3px;
|
|
||||||
font-weight: bold;
|
|
||||||
font-size: 1.1em;
|
|
||||||
}
|
|
||||||
#output .allowed-badge {
|
|
||||||
background-color: #4caf50;
|
|
||||||
color: white;
|
|
||||||
}
|
|
||||||
#output .denied-badge {
|
|
||||||
background-color: #f44336;
|
|
||||||
color: white;
|
|
||||||
}
|
|
||||||
.details-section {
|
|
||||||
margin-top: 1em;
|
|
||||||
}
|
|
||||||
.details-section dt {
|
|
||||||
font-weight: bold;
|
|
||||||
margin-top: 0.5em;
|
|
||||||
}
|
|
||||||
.details-section dd {
|
|
||||||
margin-left: 1em;
|
|
||||||
}
|
|
||||||
</style>
|
|
||||||
{% endblock %}
|
|
||||||
|
|
||||||
{% block content %}
|
|
||||||
<h1>Permission check</h1>
|
|
||||||
|
|
||||||
{% set current_tab = "check" %}
|
|
||||||
{% include "_permissions_debug_tabs.html" %}
|
|
||||||
|
|
||||||
<p>Use this tool to test permission checks for the current actor. It queries the <code>/-/check.json</code> API endpoint.</p>
|
|
||||||
|
|
||||||
{% if request.actor %}
|
|
||||||
<p>Current actor: <strong>{{ request.actor.get("id", "anonymous") }}</strong></p>
|
|
||||||
{% else %}
|
|
||||||
<p>Current actor: <strong>anonymous (not logged in)</strong></p>
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
<div class="permission-form">
|
|
||||||
<form id="check-form" method="get" action="{{ urls.path("-/check") }}">
|
|
||||||
<div class="form-section">
|
|
||||||
<label for="action">Action (permission name):</label>
|
|
||||||
<select id="action" name="action" required>
|
|
||||||
<option value="">Select an action...</option>
|
|
||||||
{% for action_name in sorted_actions %}
|
|
||||||
<option value="{{ action_name }}">{{ action_name }}</option>
|
|
||||||
{% endfor %}
|
|
||||||
</select>
|
|
||||||
<small>The permission action to check</small>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="form-section">
|
|
||||||
<label for="parent">Parent resource (optional):</label>
|
|
||||||
<input type="text" id="parent" name="parent" placeholder="e.g., database name">
|
|
||||||
<small>For database-level permissions, specify the database name</small>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="form-section">
|
|
||||||
<label for="child">Child resource (optional):</label>
|
|
||||||
<input type="text" id="child" name="child" placeholder="e.g., table name">
|
|
||||||
<small>For table-level permissions, specify the table name (requires parent)</small>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="form-actions">
|
|
||||||
<button type="submit" class="submit-btn" id="submit-btn">Check Permission</button>
|
|
||||||
</div>
|
|
||||||
</form>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div id="output" style="display: none;">
|
|
||||||
<h2>Result: <span class="result-badge" id="result-badge"></span></h2>
|
|
||||||
|
|
||||||
<dl class="details-section">
|
|
||||||
<dt>Action:</dt>
|
|
||||||
<dd id="result-action"></dd>
|
|
||||||
|
|
||||||
<dt>Resource Path:</dt>
|
|
||||||
<dd id="result-resource"></dd>
|
|
||||||
|
|
||||||
<dt>Actor ID:</dt>
|
|
||||||
<dd id="result-actor"></dd>
|
|
||||||
|
|
||||||
<div id="additional-details"></div>
|
|
||||||
</dl>
|
|
||||||
|
|
||||||
<details style="margin-top: 1em;">
|
|
||||||
<summary style="cursor: pointer; font-weight: bold;">Raw JSON response</summary>
|
|
||||||
<pre id="raw-json" style="margin-top: 1em; padding: 1em; background-color: #f5f5f5; border: 1px solid #ddd; border-radius: 3px; overflow-x: auto;"></pre>
|
|
||||||
</details>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<script>
|
|
||||||
const form = document.getElementById('check-form');
|
|
||||||
const output = document.getElementById('output');
|
|
||||||
const submitBtn = document.getElementById('submit-btn');
|
|
||||||
|
|
||||||
async function performCheck() {
|
|
||||||
submitBtn.disabled = true;
|
|
||||||
submitBtn.textContent = 'Checking...';
|
|
||||||
|
|
||||||
    const formData = new FormData(form);
    const params = new URLSearchParams();
    for (const [key, value] of formData.entries()) {
        if (value) {
            params.append(key, value);
        }
    }

    try {
        const response = await fetch('{{ urls.path("-/check.json") }}?' + params.toString(), {
            method: 'GET',
            headers: {
                'Accept': 'application/json',
            }
        });

        const data = await response.json();

        if (response.ok) {
            displayResult(data);
        } else {
            displayError(data);
        }
    } catch (error) {
        alert('Error: ' + error.message);
    } finally {
        submitBtn.disabled = false;
        submitBtn.textContent = 'Check Permission';
    }
}

// Populate form on initial load
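// populateFormFromURL() and jsonFormatHighlight() are not defined in this
// template; they are assumed to be provided by shared permissions-debug
// JavaScript included elsewhere on the page.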
(function() {
    const params = populateFormFromURL();
    const action = params.get('action');
    if (action) {
        performCheck();
    }
})();

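// Render a successful response from -/check.json into the result panel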
function displayResult(data) {
    output.style.display = 'block';

    // Set badge and styling
    const resultBadge = document.getElementById('result-badge');
    if (data.allowed) {
        output.className = 'allowed';
        resultBadge.className = 'result-badge allowed-badge';
        resultBadge.textContent = 'ALLOWED ✓';
    } else {
        output.className = 'denied';
        resultBadge.className = 'result-badge denied-badge';
        resultBadge.textContent = 'DENIED ✗';
    }

    // Basic details
    document.getElementById('result-action').textContent = data.action || 'N/A';
    document.getElementById('result-resource').textContent = data.resource?.path || '/';
    document.getElementById('result-actor').textContent = data.actor_id || 'anonymous';

    // Additional details
    const additionalDetails = document.getElementById('additional-details');
    additionalDetails.innerHTML = '';

    function addDetail(label, value) {
        const dt = document.createElement('dt');
        dt.textContent = label;
        const dd = document.createElement('dd');
        dd.textContent = value;
        additionalDetails.appendChild(dt);
        additionalDetails.appendChild(dd);
    }

    if (data.reason !== undefined) {
        addDetail('Reason:', data.reason || 'N/A');
    }
    if (data.source_plugin !== undefined) {
        addDetail('Source Plugin:', data.source_plugin || 'N/A');
    }
    if (data.used_default !== undefined) {
        addDetail('Used Default:', data.used_default ? 'Yes' : 'No');
    }
    if (data.depth !== undefined) {
        addDetail('Depth:', data.depth);
    }

    // Raw JSON
    document.getElementById('raw-json').innerHTML = jsonFormatHighlight(data);

    // Scroll to output
    output.scrollIntoView({ behavior: 'smooth', block: 'nearest' });
}

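// Render an error (non-2xx) response from -/check.json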
function displayError(data) {
    output.style.display = 'block';
    output.className = 'denied';

    const resultBadge = document.getElementById('result-badge');
    resultBadge.className = 'result-badge denied-badge';
    resultBadge.textContent = 'ERROR';

    document.getElementById('result-action').textContent = 'N/A';
    document.getElementById('result-resource').textContent = 'N/A';
    document.getElementById('result-actor').textContent = 'N/A';

    const additionalDetails = document.getElementById('additional-details');
    additionalDetails.innerHTML = '<dt>Error:</dt><dd>' + (data.error || 'Unknown error') + '</dd>';

    document.getElementById('raw-json').innerHTML = jsonFormatHighlight(data);

    output.scrollIntoView({ behavior: 'smooth', block: 'nearest' });
}

// The child resource requires a parent: focusing the child input without a
// parent value warns the user and returns focus to the parent input
const parentInput = document.getElementById('parent');
const childInput = document.getElementById('child');

childInput.addEventListener('focus', () => {
    if (!parentInput.value) {
        alert('Please specify a parent resource first before adding a child resource.');
        parentInput.focus();
    }
});
</script>

{% endblock %}