diff --git a/.github/workflows/deploy-latest.yml b/.github/workflows/deploy-latest.yml
index 4f67b030..6907b438 100644
--- a/.github/workflows/deploy-latest.yml
+++ b/.github/workflows/deploy-latest.yml
@@ -25,7 +25,7 @@ jobs:
       name: Configure pip caching
       with:
         path: ~/.cache/pip
-        key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
+        key: ${{ runner.os }}-pip-${{ hashFiles('**/pyproject.toml') }}
         restore-keys: |
           ${{ runner.os }}-pip-
     - name: Install Python dependencies
diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
index 5acb4899..14bfaded 100644
--- a/.github/workflows/publish.yml
+++ b/.github/workflows/publish.yml
@@ -20,7 +20,7 @@ jobs:
       with:
         python-version: ${{ matrix.python-version }}
         cache: pip
-        cache-dependency-path: setup.py
+        cache-dependency-path: pyproject.toml
     - name: Install dependencies
       run: |
         pip install -e '.[test]'
@@ -41,7 +41,7 @@ jobs:
       with:
         python-version: '3.13'
         cache: pip
-        cache-dependency-path: setup.py
+        cache-dependency-path: pyproject.toml
     - name: Install dependencies
       run: |
         pip install setuptools wheel build
@@ -62,7 +62,7 @@ jobs:
       with:
         python-version: '3.10'
         cache: pip
-        cache-dependency-path: setup.py
+        cache-dependency-path: pyproject.toml
     - name: Install dependencies
       run: |
         python -m pip install -e .[docs]
diff --git a/.github/workflows/spellcheck.yml b/.github/workflows/spellcheck.yml
index 8a47fd2d..7c5370ce 100644
--- a/.github/workflows/spellcheck.yml
+++ b/.github/workflows/spellcheck.yml
@@ -15,7 +15,7 @@ jobs:
       with:
         python-version: '3.11'
         cache: 'pip'
-        cache-dependency-path: '**/setup.py'
+        cache-dependency-path: '**/pyproject.toml'
     - name: Install dependencies
      run: |
        pip install -e '.[docs]'
diff --git a/.github/workflows/test-coverage.yml b/.github/workflows/test-coverage.yml
index 22a69150..8d73b64d 100644
--- a/.github/workflows/test-coverage.yml
+++ b/.github/workflows/test-coverage.yml
@@ -21,7 +21,7 @@ jobs:
       with:
         python-version: '3.12'
         cache: 'pip'
-        cache-dependency-path: '**/setup.py'
+        cache-dependency-path: '**/pyproject.toml'
     - name: Install Python dependencies
      run: |
        python -m pip install --upgrade pip
diff --git a/.github/workflows/test-pyodide.yml b/.github/workflows/test-pyodide.yml
index 7357b30c..b490a9bf 100644
--- a/.github/workflows/test-pyodide.yml
+++ b/.github/workflows/test-pyodide.yml
@@ -18,7 +18,7 @@ jobs:
       with:
         python-version: "3.10"
         cache: 'pip'
-        cache-dependency-path: '**/setup.py'
+        cache-dependency-path: '**/pyproject.toml'
     - name: Cache Playwright browsers
      uses: actions/cache@v4
      with:
diff --git a/.github/workflows/test-sqlite-support.yml b/.github/workflows/test-sqlite-support.yml
index 698aec8a..76ea138a 100644
--- a/.github/workflows/test-sqlite-support.yml
+++ b/.github/workflows/test-sqlite-support.yml
@@ -32,7 +32,7 @@ jobs:
         python-version: ${{ matrix.python-version }}
         allow-prereleases: true
         cache: pip
-        cache-dependency-path: setup.py
+        cache-dependency-path: pyproject.toml
     - name: Set up SQLite ${{ matrix.sqlite-version }}
      uses: asg017/sqlite-versions@71ea0de37ae739c33e447af91ba71dda8fcf22e6
      with:
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index e6d67b5c..1e5e03d2 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -19,7 +19,7 @@ jobs:
         python-version: ${{ matrix.python-version }}
         allow-prereleases: true
         cache: pip
-        cache-dependency-path: setup.py
+        cache-dependency-path: pyproject.toml
     - name: Build extension for --load-extension test
      run: |-
        (cd tests && gcc ext.c -fPIC -shared -o ext.so)
diff --git a/.gitignore b/.gitignore
index 277ff653..70e6bbeb 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,6 +5,9 @@
 scratchpad
 .vscode
 
+uv.lock
+data.db
+
 # We don't use Pipfile, so ignore them
 Pipfile
 Pipfile.lock
@@ -123,4 +126,4 @@ node_modules
 # include it in source control.
 tests/*.dylib
 tests/*.so
-tests/*.dll
\ No newline at end of file
+tests/*.dll
diff --git a/Justfile b/Justfile
index 3e7e467a..8e4d6066 100644
--- a/Justfile
+++ b/Justfile
@@ -5,39 +5,39 @@ export DATASETTE_SECRET := "not_a_secret"
 
 # Setup project
 @init:
-    pipenv run pip install -e '.[test,docs]'
+    uv sync --extra test --extra docs
 
 # Run pytest with supplied options
-@test *options:
-    pipenv run pytest {{options}}
+@test *options: init
+    uv run pytest -n auto {{options}}
 
 @codespell:
-    pipenv run codespell README.md --ignore-words docs/codespell-ignore-words.txt
-    pipenv run codespell docs/*.rst --ignore-words docs/codespell-ignore-words.txt
-    pipenv run codespell datasette -S datasette/static --ignore-words docs/codespell-ignore-words.txt
-    pipenv run codespell tests --ignore-words docs/codespell-ignore-words.txt
+    uv run codespell README.md --ignore-words docs/codespell-ignore-words.txt
+    uv run codespell docs/*.rst --ignore-words docs/codespell-ignore-words.txt
+    uv run codespell datasette -S datasette/static --ignore-words docs/codespell-ignore-words.txt
+    uv run codespell tests --ignore-words docs/codespell-ignore-words.txt
 
 # Run linters: black, flake8, mypy, cog
 @lint: codespell
-    pipenv run black . --check
-    pipenv run flake8
-    pipenv run cog --check README.md docs/*.rst
+    uv run black . --check
+    uv run flake8
+    uv run cog --check README.md docs/*.rst
 
 # Rebuild docs with cog
 @cog:
-    pipenv run cog -r README.md docs/*.rst
+    uv run cog -r README.md docs/*.rst
 
 # Serve live docs on localhost:8000
 @docs: cog blacken-docs
-    cd docs && pipenv run make livehtml
+    cd docs && uv run make livehtml
 
 # Apply Black
 @black:
-    pipenv run black .
+    uv run black .
 
 # Apply blacken-docs
 @blacken-docs:
-    pipenv run blacken-docs -l 60 docs/*.rst
+    uv run blacken-docs -l 60 docs/*.rst
 
 # Apply prettier
 @prettier:
@@ -46,7 +46,7 @@ export DATASETTE_SECRET := "not_a_secret"
 # Format code with both black and prettier
 @format: black prettier blacken-docs
 
-@serve:
-    pipenv run sqlite-utils create-database data.db
-    pipenv run sqlite-utils create-table data.db docs id integer title text --pk id --ignore
-    pipenv run python -m datasette data.db --root --reload
+@serve *options:
+    uv run sqlite-utils create-database data.db
+    uv run sqlite-utils create-table data.db docs id integer title text --pk id --ignore
+    uv run python -m datasette data.db --root --reload {{options}}
diff --git a/docs/contributing.rst b/docs/contributing.rst
index 12c8d477..4771aa11 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -42,7 +42,7 @@ The next step is to create a virtual environment for your project and use it to
 
     # Install Datasette and its testing dependencies
     python3 -m pip install -e '.[test]'
 
-That last line does most of the work: ``pip install -e`` means "install this package in a way that allows me to edit the source code in place". The ``.[test]`` option means "use the setup.py in this directory and install the optional testing dependencies as well".
+That last line does most of the work: ``pip install -e`` means "install this package in a way that allows me to edit the source code in place". The ``.[test]`` option means "install the optional testing dependencies as well".
 
 .. _contributing_running_tests:
@@ -160,7 +160,7 @@ If any of your code does not conform to Black you can run this to automatically
 
 ::
 
-    reformatted ../datasette/setup.py
+    reformatted ../datasette/app.py
     All done! ✨ 🍰 ✨
     1 file reformatted, 94 files left unchanged.
 
diff --git a/docs/plugin_hooks.rst b/docs/plugin_hooks.rst
index b1615e27..a06d3b4c 100644
--- a/docs/plugin_hooks.rst
+++ b/docs/plugin_hooks.rst
@@ -691,7 +691,7 @@ Help text (from the docstring for the function plus any defined Click arguments
 
 Plugins can register multiple commands by making multiple calls to the ``@cli.command()`` decorator. Consult the `Click documentation <https://click.palletsprojects.com/>`__ for full details on how to build a CLI command, including how to define arguments and options.
 
-Note that ``register_commands()`` plugins cannot used with the :ref:`--plugins-dir mechanism <writing_plugins_one_off>` - they need to be installed into the same virtual environment as Datasette using ``pip install``. Provided it has a ``setup.py`` file (see :ref:`writing_plugins_packaging`) you can run ``pip install`` directly against the directory in which you are developing your plugin like so::
+Note that ``register_commands()`` plugins cannot be used with the :ref:`--plugins-dir mechanism <writing_plugins_one_off>` - they need to be installed into the same virtual environment as Datasette using ``pip install``. Provided it has a ``pyproject.toml`` file (see :ref:`writing_plugins_packaging`) you can run ``pip install`` directly against the directory in which you are developing your plugin like so::
 
     pip install -e path/to/my/datasette-plugin
diff --git a/docs/testing_plugins.rst b/docs/testing_plugins.rst
index f1363fb4..e4fad500 100644
--- a/docs/testing_plugins.rst
+++ b/docs/testing_plugins.rst
@@ -33,16 +33,16 @@ You can install these packages like so::
 
     pip install pytest pytest-asyncio
 
-If you are building an installable package you can add them as test dependencies to your ``setup.py`` module like this:
+If you are building an installable package you can add them as test dependencies to your ``pyproject.toml`` file like this:
 
-.. code-block:: python
+.. code-block:: toml
 
-    setup(
-        name="datasette-my-plugin",
-        # ...
-        extras_require={"test": ["pytest", "pytest-asyncio"]},
-        tests_require=["datasette-my-plugin[test]"],
-    )
+    [project]
+    name = "datasette-my-plugin"
+    # ...
+
+    [project.optional-dependencies]
+    test = ["pytest", "pytest-asyncio"]
 
 You can then install the test dependencies like so::
 
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 00000000..1536c09b
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,93 @@
+[project]
+name = "datasette"
+dynamic = ["version"]
+description = "An open source multi-tool for exploring and publishing data"
+readme = { file = "README.md", content-type = "text/markdown" }
+authors = [
+    { name = "Simon Willison" },
+]
+license = "Apache-2.0"
+requires-python = ">=3.10"
+classifiers = [
+    "Development Status :: 4 - Beta",
+    "Framework :: Datasette",
+    "Intended Audience :: Developers",
+    "Intended Audience :: Science/Research",
+    "Intended Audience :: End Users/Desktop",
+    "Topic :: Database",
+    "Programming Language :: Python :: 3.10",
+    "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
+    "Programming Language :: Python :: 3.13",
+    "Programming Language :: Python :: 3.14",
+]
+
+dependencies = [
+    "asgiref>=3.2.10",
+    "click>=7.1.1",
+    "click-default-group>=1.2.3",
+    "Jinja2>=2.10.3",
+    "hupper>=1.9",
+    "httpx>=0.20",
+    "pluggy>=1.0",
+    "uvicorn>=0.11",
+    "aiofiles>=0.4",
+    "janus>=0.6.2",
+    "asgi-csrf>=0.10",
+    "PyYAML>=5.3",
+    "mergedeep>=1.1.1",
+    "itsdangerous>=1.1",
+    "sqlite-utils>=3.30",
+    "asyncinject>=0.6.1",
+    "setuptools",
+    "pip",
+]
+
+[project.urls]
+Homepage = "https://datasette.io/"
+Documentation = "https://docs.datasette.io/en/stable/"
+Changelog = "https://docs.datasette.io/en/stable/changelog.html"
+"Live demo" = "https://latest.datasette.io/"
+"Source code" = "https://github.com/simonw/datasette"
+Issues = "https://github.com/simonw/datasette/issues"
+CI = "https://github.com/simonw/datasette/actions?query=workflow%3ATest"
+
+[project.scripts]
+datasette = "datasette.cli:cli"
+
+[project.optional-dependencies]
+docs = [
+    "Sphinx==7.4.7",
+    "furo==2025.9.25",
+    "sphinx-autobuild",
+    "codespell>=2.2.5",
+    "blacken-docs",
+    "sphinx-copybutton",
+    "sphinx-inline-tabs",
+    "ruamel.yaml",
+]
+test = [
+    "pytest>=5.2.2",
+    "pytest-xdist>=2.2.1",
+    "pytest-asyncio>=1.2.0",
+    "beautifulsoup4>=4.8.1",
+    "black==25.9.0",
+    "blacken-docs==1.20.0",
+    "pytest-timeout>=1.4.2",
+    "trustme>=0.7",
+    "cogapp>=3.3.0",
+]
+rich = ["rich"]
+
+[build-system]
+requires = ["setuptools"]
+build-backend = "setuptools.build_meta"
+
+[tool.setuptools.packages.find]
+include = ["datasette*"]
+
+[tool.setuptools.package-data]
+datasette = ["templates/*.html"]
+
+[tool.setuptools.dynamic]
+version = {attr = "datasette.version.__version__"}
diff --git a/setup.py b/setup.py
deleted file mode 100644
index 7b7a7747..00000000
--- a/setup.py
+++ /dev/null
@@ -1,107 +0,0 @@
-from setuptools import setup, find_packages
-import os
-
-
-def get_long_description():
-    with open(
-        os.path.join(os.path.dirname(os.path.abspath(__file__)), "README.md"),
-        encoding="utf8",
-    ) as fp:
-        return fp.read()
-
-
-def get_version():
-    path = os.path.join(
-        os.path.dirname(os.path.abspath(__file__)), "datasette", "version.py"
-    )
-    g = {}
-    with open(path) as fp:
-        exec(fp.read(), g)
-    return g["__version__"]
-
-
-setup(
-    name="datasette",
-    version=get_version(),
-    description="An open source multi-tool for exploring and publishing data",
-    long_description=get_long_description(),
-    long_description_content_type="text/markdown",
-    author="Simon Willison",
-    license="Apache License, Version 2.0",
-    url="https://datasette.io/",
-    project_urls={
-        "Documentation": "https://docs.datasette.io/en/stable/",
"https://docs.datasette.io/en/stable/", - "Changelog": "https://docs.datasette.io/en/stable/changelog.html", - "Live demo": "https://latest.datasette.io/", - "Source code": "https://github.com/simonw/datasette", - "Issues": "https://github.com/simonw/datasette/issues", - "CI": "https://github.com/simonw/datasette/actions?query=workflow%3ATest", - }, - packages=find_packages(exclude=("tests",)), - package_data={"datasette": ["templates/*.html"]}, - include_package_data=True, - python_requires=">=3.10", - install_requires=[ - "asgiref>=3.2.10", - "click>=7.1.1", - "click-default-group>=1.2.3", - "Jinja2>=2.10.3", - "hupper>=1.9", - "httpx>=0.20", - 'importlib_metadata>=4.6; python_version < "3.10"', - "pluggy>=1.0", - "uvicorn>=0.11", - "aiofiles>=0.4", - "janus>=0.6.2", - "asgi-csrf>=0.10", - "PyYAML>=5.3", - "mergedeep>=1.1.1", - "itsdangerous>=1.1", - "sqlite-utils>=3.30", - "asyncinject>=0.6.1", - "setuptools", - "pip", - ], - entry_points=""" - [console_scripts] - datasette=datasette.cli:cli - """, - extras_require={ - "docs": [ - "Sphinx==7.4.7", - "furo==2025.9.25", - "sphinx-autobuild", - "codespell>=2.2.5", - "blacken-docs", - "sphinx-copybutton", - "sphinx-inline-tabs", - "ruamel.yaml", - ], - "test": [ - "pytest>=5.2.2", - "pytest-xdist>=2.2.1", - "pytest-asyncio>=1.2.0", - "beautifulsoup4>=4.8.1", - "black==25.9.0", - "blacken-docs==1.20.0", - "pytest-timeout>=1.4.2", - "trustme>=0.7", - "cogapp>=3.3.0", - ], - "rich": ["rich"], - }, - classifiers=[ - "Development Status :: 4 - Beta", - "Framework :: Datasette", - "Intended Audience :: Developers", - "Intended Audience :: Science/Research", - "Intended Audience :: End Users/Desktop", - "Topic :: Database", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.14", - "Programming Language :: Python :: 3.13", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.10", - ], -) diff --git a/tests/conftest.py b/tests/conftest.py index 4749fe6a..31c45ed3 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -42,7 +42,9 @@ def wait_until_responds(url, timeout=5.0, client=httpx, **kwargs): @pytest_asyncio.fixture async def ds_client(): from datasette.app import Datasette + from datasette.database import Database from .fixtures import CONFIG, METADATA, PLUGINS_DIR + import secrets global _ds_client if _ds_client is not None: @@ -63,7 +65,10 @@ async def ds_client(): ) from .fixtures import TABLES, TABLE_PARAMETERIZED_SQL - db = ds.add_memory_database("fixtures") + # Use a unique memory_name to avoid collisions between different + # Datasette instances in the same process, but use "fixtures" for routing + unique_memory_name = f"fixtures_{secrets.token_hex(8)}" + db = ds.add_database(Database(ds, memory_name=unique_memory_name), name="fixtures") ds.remove_database("_memory") def prepare(conn):