Ported setup.py to pyproject.toml (#2555)

* Ported setup.py to pyproject.toml, refs #2553

* Make fixtures tests less flaky

The in-memory fixtures table was being shared between different
instances of the test client, leading to occasional errors when
running the full test suite.
This commit is contained in:
Simon Willison 2025-10-30 10:41:41 -07:00 committed by GitHub
commit ce4b0794b2
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
15 changed files with 141 additions and 147 deletions

View file

@ -25,7 +25,7 @@ jobs:
name: Configure pip caching
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
key: ${{ runner.os }}-pip-${{ hashFiles('**/pyproject.toml') }}
restore-keys: |
${{ runner.os }}-pip-
- name: Install Python dependencies

View file

@ -20,7 +20,7 @@ jobs:
with:
python-version: ${{ matrix.python-version }}
cache: pip
cache-dependency-path: setup.py
cache-dependency-path: pyproject.toml
- name: Install dependencies
run: |
pip install -e '.[test]'
@ -41,7 +41,7 @@ jobs:
with:
python-version: '3.13'
cache: pip
cache-dependency-path: setup.py
cache-dependency-path: pyproject.toml
- name: Install dependencies
run: |
pip install setuptools wheel build
@ -62,7 +62,7 @@ jobs:
with:
python-version: '3.10'
cache: pip
cache-dependency-path: setup.py
cache-dependency-path: pyproject.toml
- name: Install dependencies
run: |
python -m pip install -e .[docs]

View file

@ -15,7 +15,7 @@ jobs:
with:
python-version: '3.11'
cache: 'pip'
cache-dependency-path: '**/setup.py'
cache-dependency-path: '**/pyproject.toml'
- name: Install dependencies
run: |
pip install -e '.[docs]'

View file

@ -21,7 +21,7 @@ jobs:
with:
python-version: '3.12'
cache: 'pip'
cache-dependency-path: '**/setup.py'
cache-dependency-path: '**/pyproject.toml'
- name: Install Python dependencies
run: |
python -m pip install --upgrade pip

View file

@ -18,7 +18,7 @@ jobs:
with:
python-version: "3.10"
cache: 'pip'
cache-dependency-path: '**/setup.py'
cache-dependency-path: '**/pyproject.toml'
- name: Cache Playwright browsers
uses: actions/cache@v4
with:

View file

@ -32,7 +32,7 @@ jobs:
python-version: ${{ matrix.python-version }}
allow-prereleases: true
cache: pip
cache-dependency-path: setup.py
cache-dependency-path: pyproject.toml
- name: Set up SQLite ${{ matrix.sqlite-version }}
uses: asg017/sqlite-versions@71ea0de37ae739c33e447af91ba71dda8fcf22e6
with:

View file

@ -19,7 +19,7 @@ jobs:
python-version: ${{ matrix.python-version }}
allow-prereleases: true
cache: pip
cache-dependency-path: setup.py
cache-dependency-path: pyproject.toml
- name: Build extension for --load-extension test
run: |-
(cd tests && gcc ext.c -fPIC -shared -o ext.so)

3
.gitignore vendored
View file

@ -5,6 +5,9 @@ scratchpad
.vscode
uv.lock
data.db
# We don't use Pipfile, so ignore them
Pipfile
Pipfile.lock

View file

@ -5,39 +5,39 @@ export DATASETTE_SECRET := "not_a_secret"
# Setup project
@init:
pipenv run pip install -e '.[test,docs]'
uv sync --extra test --extra docs
# Run pytest with supplied options
@test *options:
pipenv run pytest {{options}}
@test *options: init
uv run pytest -n auto {{options}}
@codespell:
pipenv run codespell README.md --ignore-words docs/codespell-ignore-words.txt
pipenv run codespell docs/*.rst --ignore-words docs/codespell-ignore-words.txt
pipenv run codespell datasette -S datasette/static --ignore-words docs/codespell-ignore-words.txt
pipenv run codespell tests --ignore-words docs/codespell-ignore-words.txt
uv run codespell README.md --ignore-words docs/codespell-ignore-words.txt
uv run codespell docs/*.rst --ignore-words docs/codespell-ignore-words.txt
uv run codespell datasette -S datasette/static --ignore-words docs/codespell-ignore-words.txt
uv run codespell tests --ignore-words docs/codespell-ignore-words.txt
# Run linters: black, flake8, mypy, cog
@lint: codespell
pipenv run black . --check
pipenv run flake8
pipenv run cog --check README.md docs/*.rst
uv run black . --check
uv run flake8
uv run cog --check README.md docs/*.rst
# Rebuild docs with cog
@cog:
pipenv run cog -r README.md docs/*.rst
uv run cog -r README.md docs/*.rst
# Serve live docs on localhost:8000
@docs: cog blacken-docs
cd docs && pipenv run make livehtml
cd docs && uv run make livehtml
# Apply Black
@black:
pipenv run black .
uv run black .
# Apply blacken-docs
@blacken-docs:
pipenv run blacken-docs -l 60 docs/*.rst
uv run blacken-docs -l 60 docs/*.rst
# Apply prettier
@prettier:
@ -46,7 +46,7 @@ export DATASETTE_SECRET := "not_a_secret"
# Format code with both black and prettier
@format: black prettier blacken-docs
@serve:
pipenv run sqlite-utils create-database data.db
pipenv run sqlite-utils create-table data.db docs id integer title text --pk id --ignore
pipenv run python -m datasette data.db --root --reload
@serve *options:
uv run sqlite-utils create-database data.db
uv run sqlite-utils create-table data.db docs id integer title text --pk id --ignore
uv run python -m datasette data.db --root --reload {{options}}

View file

@ -42,7 +42,7 @@ The next step is to create a virtual environment for your project and use it to
# Install Datasette and its testing dependencies
python3 -m pip install -e '.[test]'
That last line does most of the work: ``pip install -e`` means "install this package in a way that allows me to edit the source code in place". The ``.[test]`` option means "use the setup.py in this directory and install the optional testing dependencies as well".
That last line does most of the work: ``pip install -e`` means "install this package in a way that allows me to edit the source code in place". The ``.[test]`` option means "install the optional testing dependencies as well".
.. _contributing_running_tests:
@ -160,7 +160,7 @@ If any of your code does not conform to Black you can run this to automatically
::
reformatted ../datasette/setup.py
reformatted ../datasette/app.py
All done! ✨ 🍰 ✨
1 file reformatted, 94 files left unchanged.

View file

@ -691,7 +691,7 @@ Help text (from the docstring for the function plus any defined Click arguments
Plugins can register multiple commands by making multiple calls to the ``@cli.command()`` decorator. Consult the `Click documentation <https://click.palletsprojects.com/>`__ for full details on how to build a CLI command, including how to define arguments and options.
Note that ``register_commands()`` plugins cannot used with the :ref:`--plugins-dir mechanism <writing_plugins_one_off>` - they need to be installed into the same virtual environment as Datasette using ``pip install``. Provided it has a ``setup.py`` file (see :ref:`writing_plugins_packaging`) you can run ``pip install`` directly against the directory in which you are developing your plugin like so::
Note that ``register_commands()`` plugins cannot be used with the :ref:`--plugins-dir mechanism <writing_plugins_one_off>` - they need to be installed into the same virtual environment as Datasette using ``pip install``. Provided it has a ``pyproject.toml`` file (see :ref:`writing_plugins_packaging`) you can run ``pip install`` directly against the directory in which you are developing your plugin like so::
pip install -e path/to/my/datasette-plugin

View file

@ -33,16 +33,16 @@ You can install these packages like so::
pip install pytest pytest-asyncio
If you are building an installable package you can add them as test dependencies to your ``setup.py`` module like this:
If you are building an installable package you can add them as test dependencies to your ``pyproject.toml`` file like this:
.. code-block:: python
.. code-block:: toml
setup(
name="datasette-my-plugin",
[project]
name = "datasette-my-plugin"
# ...
extras_require={"test": ["pytest", "pytest-asyncio"]},
tests_require=["datasette-my-plugin[test]"],
)
[project.optional-dependencies]
test = ["pytest", "pytest-asyncio"]
You can then install the test dependencies like so::

93
pyproject.toml Normal file
View file

@ -0,0 +1,93 @@
# Packaging metadata (PEP 621) — ported from the previous setup.py.
[project]
name = "datasette"
# Version is resolved at build time from datasette/version.py,
# see [tool.setuptools.dynamic] at the bottom of this file.
dynamic = ["version"]
description = "An open source multi-tool for exploring and publishing data"
readme = { file = "README.md", content-type = "text/markdown" }
authors = [
{ name = "Simon Willison" },
]
# NOTE(review): bare SPDX string form of `license` is only understood by
# setuptools >= 77 (PEP 639) — confirm the unpinned [build-system] requires
# always resolves to a new enough setuptools.
license = "Apache-2.0"
requires-python = ">=3.10"
classifiers = [
"Development Status :: 4 - Beta",
"Framework :: Datasette",
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"Intended Audience :: End Users/Desktop",
"Topic :: Database",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
"Programming Language :: Python :: 3.14",
]
# Runtime dependencies (mirrors install_requires from the old setup.py;
# the importlib_metadata backport was dropped since requires-python >= 3.10).
dependencies = [
"asgiref>=3.2.10",
"click>=7.1.1",
"click-default-group>=1.2.3",
"Jinja2>=2.10.3",
"hupper>=1.9",
"httpx>=0.20",
"pluggy>=1.0",
"uvicorn>=0.11",
"aiofiles>=0.4",
"janus>=0.6.2",
"asgi-csrf>=0.10",
"PyYAML>=5.3",
"mergedeep>=1.1.1",
"itsdangerous>=1.1",
"sqlite-utils>=3.30",
"asyncinject>=0.6.1",
"setuptools",
"pip",
]
# Links shown on the PyPI project page.
[project.urls]
Homepage = "https://datasette.io/"
Documentation = "https://docs.datasette.io/en/stable/"
Changelog = "https://docs.datasette.io/en/stable/changelog.html"
"Live demo" = "https://latest.datasette.io/"
"Source code" = "https://github.com/simonw/datasette"
Issues = "https://github.com/simonw/datasette/issues"
CI = "https://github.com/simonw/datasette/actions?query=workflow%3ATest"
# Console entry point: installs the `datasette` command.
[project.scripts]
datasette = "datasette.cli:cli"
# Optional dependency groups, e.g. `pip install -e '.[test,docs]'`.
[project.optional-dependencies]
docs = [
"Sphinx==7.4.7",
"furo==2025.9.25",
"sphinx-autobuild",
"codespell>=2.2.5",
"blacken-docs",
"sphinx-copybutton",
"sphinx-inline-tabs",
"ruamel.yaml",
]
test = [
"pytest>=5.2.2",
"pytest-xdist>=2.2.1",
"pytest-asyncio>=1.2.0",
"beautifulsoup4>=4.8.1",
"black==25.9.0",
"blacken-docs==1.20.0",
"pytest-timeout>=1.4.2",
"trustme>=0.7",
"cogapp>=3.3.0",
]
rich = ["rich"]
[build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"
[tool.setuptools.packages.find]
include = ["datasette*"]
# NOTE(review): the old setup.py also set include_package_data=True in
# addition to this package_data — confirm non-template assets (e.g.
# datasette/static) are still bundled into wheels/sdists.
[tool.setuptools.package-data]
datasette = ["templates/*.html"]
[tool.setuptools.dynamic]
version = {attr = "datasette.version.__version__"}

107
setup.py
View file

@ -1,107 +0,0 @@
from setuptools import setup, find_packages
import os
def get_long_description():
    """Return the full text of README.md, located next to this file."""
    readme_path = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), "README.md"
    )
    with open(readme_path, encoding="utf8") as readme:
        return readme.read()
def get_version():
    """Extract ``__version__`` from datasette/version.py without importing
    the package (which would require its dependencies to be installed)."""
    version_file = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), "datasette", "version.py"
    )
    namespace = {}
    with open(version_file) as source:
        exec(source.read(), namespace)
    return namespace["__version__"]
# Packaging configuration. This metadata has been ported to pyproject.toml;
# if both files exist, keep them in sync.
setup(
    name="datasette",
    version=get_version(),
    description="An open source multi-tool for exploring and publishing data",
    long_description=get_long_description(),
    long_description_content_type="text/markdown",
    author="Simon Willison",
    license="Apache License, Version 2.0",
    url="https://datasette.io/",
    # Extra links rendered on the PyPI project page.
    project_urls={
        "Documentation": "https://docs.datasette.io/en/stable/",
        "Changelog": "https://docs.datasette.io/en/stable/changelog.html",
        "Live demo": "https://latest.datasette.io/",
        "Source code": "https://github.com/simonw/datasette",
        "Issues": "https://github.com/simonw/datasette/issues",
        "CI": "https://github.com/simonw/datasette/actions?query=workflow%3ATest",
    },
    packages=find_packages(exclude=("tests",)),
    package_data={"datasette": ["templates/*.html"]},
    # Also ship any files declared via MANIFEST.in (e.g. static assets).
    include_package_data=True,
    python_requires=">=3.10",
    install_requires=[
        "asgiref>=3.2.10",
        "click>=7.1.1",
        "click-default-group>=1.2.3",
        "Jinja2>=2.10.3",
        "hupper>=1.9",
        "httpx>=0.20",
        # Backport marker is dead weight given python_requires>=3.10,
        # but kept for byte-compatible metadata.
        'importlib_metadata>=4.6; python_version < "3.10"',
        "pluggy>=1.0",
        "uvicorn>=0.11",
        "aiofiles>=0.4",
        "janus>=0.6.2",
        "asgi-csrf>=0.10",
        "PyYAML>=5.3",
        "mergedeep>=1.1.1",
        "itsdangerous>=1.1",
        "sqlite-utils>=3.30",
        "asyncinject>=0.6.1",
        "setuptools",
        "pip",
    ],
    # Registers the `datasette` console command.
    entry_points="""
[console_scripts]
datasette=datasette.cli:cli
""",
    # Optional dependency groups: `pip install datasette[docs]` etc.
    extras_require={
        "docs": [
            "Sphinx==7.4.7",
            "furo==2025.9.25",
            "sphinx-autobuild",
            "codespell>=2.2.5",
            "blacken-docs",
            "sphinx-copybutton",
            "sphinx-inline-tabs",
            "ruamel.yaml",
        ],
        "test": [
            "pytest>=5.2.2",
            "pytest-xdist>=2.2.1",
            "pytest-asyncio>=1.2.0",
            "beautifulsoup4>=4.8.1",
            "black==25.9.0",
            "blacken-docs==1.20.0",
            "pytest-timeout>=1.4.2",
            "trustme>=0.7",
            "cogapp>=3.3.0",
        ],
        "rich": ["rich"],
    },
    classifiers=[
        "Development Status :: 4 - Beta",
        "Framework :: Datasette",
        "Intended Audience :: Developers",
        "Intended Audience :: Science/Research",
        "Intended Audience :: End Users/Desktop",
        "Topic :: Database",
        "License :: OSI Approved :: Apache Software License",
        "Programming Language :: Python :: 3.14",
        "Programming Language :: Python :: 3.13",
        "Programming Language :: Python :: 3.12",
        "Programming Language :: Python :: 3.11",
        "Programming Language :: Python :: 3.10",
    ],
)

View file

@ -42,7 +42,9 @@ def wait_until_responds(url, timeout=5.0, client=httpx, **kwargs):
@pytest_asyncio.fixture
async def ds_client():
from datasette.app import Datasette
from datasette.database import Database
from .fixtures import CONFIG, METADATA, PLUGINS_DIR
import secrets
global _ds_client
if _ds_client is not None:
@ -63,7 +65,10 @@ async def ds_client():
)
from .fixtures import TABLES, TABLE_PARAMETERIZED_SQL
db = ds.add_memory_database("fixtures")
# Use a unique memory_name to avoid collisions between different
# Datasette instances in the same process, but use "fixtures" for routing
unique_memory_name = f"fixtures_{secrets.token_hex(8)}"
db = ds.add_database(Database(ds, memory_name=unique_memory_name), name="fixtures")
ds.remove_database("_memory")
def prepare(conn):