diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index c11bfa2e..38b62995 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -32,3 +32,8 @@ jobs:
- name: Check if cog needs to be run
run: |
cog --check docs/*.rst
+ - name: Check if blacken-docs needs to be run
+ run: |
+ blacken-docs -l 60 docs/*.rst
+ # This fails if a diff was generated:
+ git diff-index --quiet HEAD --
diff --git a/docs/contributing.rst b/docs/contributing.rst
index b74f2f36..c193ba49 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -144,6 +144,15 @@ If any of your code does not conform to Black you can run this to automatically
All done! ✨ 🍰 ✨
1 file reformatted, 94 files left unchanged.
+.. _contributing_formatting_blacken_docs:
+
+blacken-docs
+~~~~~~~~~~~~
+
+The `blacken-docs <https://pypi.org/project/blacken-docs/>`__ command applies Black formatting rules to code examples in the documentation. Run it like this::
+
+ blacken-docs -l 60 docs/*.rst
+
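+For example, it rewrites a call that is too long for one line, such as::
+
+    conn.execute("SELECT AddGeometryColumn('museums', 'point_geom', 4326, 'POINT', 2);")
+
+into this wrapped form, and also normalizes string literals to Black's double quotes::
+
+    conn.execute(
+        "SELECT AddGeometryColumn('museums', 'point_geom', 4326, 'POINT', 2);"
+    )
+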
.. _contributing_formatting_prettier:
Prettier
diff --git a/docs/spatialite.rst b/docs/spatialite.rst
index d1b300b2..52b6747e 100644
--- a/docs/spatialite.rst
+++ b/docs/spatialite.rst
@@ -58,21 +58,28 @@ Here's a recipe for taking a table with existing latitude and longitude columns,
.. code-block:: python
import sqlite3
- conn = sqlite3.connect('museums.db')
+
+ conn = sqlite3.connect("museums.db")
# Load the SpatiaLite extension:
conn.enable_load_extension(True)
- conn.load_extension('/usr/local/lib/mod_spatialite.dylib')
+ conn.load_extension("/usr/local/lib/mod_spatialite.dylib")
# Initialize spatial metadata for this database:
- conn.execute('select InitSpatialMetadata(1)')
+ conn.execute("select InitSpatialMetadata(1)")
# Add a geometry column called point_geom to our museums table:
- conn.execute("SELECT AddGeometryColumn('museums', 'point_geom', 4326, 'POINT', 2);")
+ conn.execute(
+ "SELECT AddGeometryColumn('museums', 'point_geom', 4326, 'POINT', 2);"
+ )
# Now update that geometry column with the lat/lon points
- conn.execute('''
+ conn.execute(
+ """
UPDATE museums SET
point_geom = GeomFromText('POINT('||"longitude"||' '||"latitude"||')',4326);
- ''')
+ """
+ )
# Now add a spatial index to that column
- conn.execute('select CreateSpatialIndex("museums", "point_geom");')
+ conn.execute(
+ 'select CreateSpatialIndex("museums", "point_geom");'
+ )
# If you don't commit, your changes will not be persisted:
conn.commit()
conn.close()
@@ -186,28 +193,37 @@ Here's Python code to create a SQLite database, enable SpatiaLite, create a plac
.. code-block:: python
import sqlite3
- conn = sqlite3.connect('places.db')
+
+ conn = sqlite3.connect("places.db")
# Enable SpatiaLite extension
conn.enable_load_extension(True)
- conn.load_extension('/usr/local/lib/mod_spatialite.dylib')
+ conn.load_extension("/usr/local/lib/mod_spatialite.dylib")
# Create the basic places table
- conn.execute('select InitSpatialMetadata(1)')
- conn.execute('create table places (id integer primary key, name text);')
+ conn.execute("select InitSpatialMetadata(1)")
+ conn.execute(
+ "create table places (id integer primary key, name text);"
+ )
# Add a MULTIPOLYGON Geometry column
- conn.execute("SELECT AddGeometryColumn('places', 'geom', 4326, 'MULTIPOLYGON', 2);")
+ conn.execute(
+ "SELECT AddGeometryColumn('places', 'geom', 4326, 'MULTIPOLYGON', 2);"
+ )
# Add a spatial index against the new column
conn.execute("SELECT CreateSpatialIndex('places', 'geom');")
# Now populate the table
from shapely.geometry.multipolygon import MultiPolygon
from shapely.geometry import shape
import requests
- geojson = requests.get('https://data.whosonfirst.org/404/227/475/404227475.geojson').json()
+
+ geojson = requests.get(
+ "https://data.whosonfirst.org/404/227/475/404227475.geojson"
+ ).json()
# Convert to "Well Known Text" format
- wkt = shape(geojson['geometry']).wkt
+ wkt = shape(geojson["geometry"]).wkt
# Insert and commit the record
- conn.execute("INSERT INTO places (id, name, geom) VALUES(null, ?, GeomFromText(?, 4326))", (
- "Wales", wkt
- ))
+ conn.execute(
+ "INSERT INTO places (id, name, geom) VALUES(null, ?, GeomFromText(?, 4326))",
+ ("Wales", wkt),
+ )
conn.commit()
Querying polygons using within()
diff --git a/docs/writing_plugins.rst b/docs/writing_plugins.rst
index bd60a4b6..89f7f5eb 100644
--- a/docs/writing_plugins.rst
+++ b/docs/writing_plugins.rst
@@ -18,9 +18,12 @@ The quickest way to start writing a plugin is to create a ``my_plugin.py`` file
from datasette import hookimpl
+
@hookimpl
def prepare_connection(conn):
- conn.create_function('hello_world', 0, lambda: 'Hello world!')
+ conn.create_function(
+ "hello_world", 0, lambda: "Hello world!"
+ )
If you save this in ``plugins/my_plugin.py`` you can then start Datasette like this::
@@ -60,22 +63,22 @@ The example consists of two files: a ``setup.py`` file that defines the plugin:
from setuptools import setup
- VERSION = '0.1'
+ VERSION = "0.1"
setup(
- name='datasette-plugin-demos',
- description='Examples of plugins for Datasette',
- author='Simon Willison',
- url='https://github.com/simonw/datasette-plugin-demos',
- license='Apache License, Version 2.0',
+ name="datasette-plugin-demos",
+ description="Examples of plugins for Datasette",
+ author="Simon Willison",
+ url="https://github.com/simonw/datasette-plugin-demos",
+ license="Apache License, Version 2.0",
version=VERSION,
- py_modules=['datasette_plugin_demos'],
+ py_modules=["datasette_plugin_demos"],
entry_points={
- 'datasette': [
- 'plugin_demos = datasette_plugin_demos'
+ "datasette": [
+ "plugin_demos = datasette_plugin_demos"
]
},
- install_requires=['datasette']
+ install_requires=["datasette"],
)
And a Python module file, ``datasette_plugin_demos.py``, that implements the plugin:
@@ -88,12 +91,14 @@ And a Python module file, ``datasette_plugin_demos.py``, that implements the plu
@hookimpl
def prepare_jinja2_environment(env):
- env.filters['uppercase'] = lambda u: u.upper()
+ env.filters["uppercase"] = lambda u: u.upper()
@hookimpl
def prepare_connection(conn):
- conn.create_function('random_integer', 2, random.randint)
+ conn.create_function(
+ "random_integer", 2, random.randint
+ )
Having built a plugin in this way you can turn it into an installable package using the following command::
@@ -123,11 +128,13 @@ To bundle the static assets for a plugin in the package that you publish to PyPI
.. code-block:: python
- package_data={
- 'datasette_plugin_name': [
- 'static/plugin.js',
- ],
- },
+    package_data={
+        "datasette_plugin_name": [
+            "static/plugin.js",
+        ],
+    },
Where ``datasette_plugin_name`` is the name of the plugin package (note that it uses underscores, not hyphens) and ``static/plugin.js`` is the path within that package to the static file.
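+
+As an illustrative sketch (assuming the plugin is laid out as a package rather than the single module shown earlier, since ``package_data`` applies to packages), this sits inside the ``setup()`` call like so:
+
+.. code-block:: python
+
+    from setuptools import setup
+
+    setup(
+        name="datasette-plugin-name",
+        packages=["datasette_plugin_name"],
+        # Bundle static/plugin.js from inside the package:
+        package_data={
+            "datasette_plugin_name": [
+                "static/plugin.js",
+            ],
+        },
+        install_requires=["datasette"],
+    )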
@@ -152,11 +159,13 @@ Templates should be bundled for distribution using the same ``package_data`` mec
.. code-block:: python
- package_data={
- 'datasette_plugin_name': [
- 'templates/my_template.html',
- ],
- },
+    package_data={
+        "datasette_plugin_name": [
+            "templates/my_template.html",
+        ],
+    },
You can also use wildcards here such as ``templates/*.html``. See `datasette-edit-schema <https://github.com/simonw/datasette-edit-schema>`__ for an example of this pattern.
diff --git a/setup.py b/setup.py
index e5dd55fd..7f0562fd 100644
--- a/setup.py
+++ b/setup.py
@@ -65,13 +65,14 @@ setup(
""",
setup_requires=["pytest-runner"],
extras_require={
- "docs": ["sphinx_rtd_theme", "sphinx-autobuild", "codespell"],
+ "docs": ["sphinx_rtd_theme", "sphinx-autobuild", "codespell", "blacken-docs"],
"test": [
"pytest>=5.2.2,<7.2.0",
"pytest-xdist>=2.2.1,<2.6",
"pytest-asyncio>=0.17,<0.19",
"beautifulsoup4>=4.8.1,<4.12.0",
"black==22.1.0",
+ "blacken-docs==1.12.1",
"pytest-timeout>=1.4.2,<2.2",
"trustme>=0.7,<0.10",
"cogapp>=3.3.0",